diff --git a/.gitignore b/.gitignore index 0743ede..9f51e25 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,6 @@ \migrations \.venv .env -\myvenv +\myenv node_modules \ No newline at end of file diff --git a/myenv/Include/site/python3.12/greenlet/greenlet.h b/myenv/Include/site/python3.12/greenlet/greenlet.h deleted file mode 100644 index d02a16e..0000000 --- a/myenv/Include/site/python3.12/greenlet/greenlet.h +++ /dev/null @@ -1,164 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ - -/* Greenlet object interface */ - -#ifndef Py_GREENLETOBJECT_H -#define Py_GREENLETOBJECT_H - - -#include - -#ifdef __cplusplus -extern "C" { -#endif - -/* This is deprecated and undocumented. It does not change. */ -#define GREENLET_VERSION "1.0.0" - -#ifndef GREENLET_MODULE -#define implementation_ptr_t void* -#endif - -typedef struct _greenlet { - PyObject_HEAD - PyObject* weakreflist; - PyObject* dict; - implementation_ptr_t pimpl; -} PyGreenlet; - -#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) - - -/* C API functions */ - -/* Total number of symbols that are exported */ -#define PyGreenlet_API_pointers 12 - -#define PyGreenlet_Type_NUM 0 -#define PyExc_GreenletError_NUM 1 -#define PyExc_GreenletExit_NUM 2 - -#define PyGreenlet_New_NUM 3 -#define PyGreenlet_GetCurrent_NUM 4 -#define PyGreenlet_Throw_NUM 5 -#define PyGreenlet_Switch_NUM 6 -#define PyGreenlet_SetParent_NUM 7 - -#define PyGreenlet_MAIN_NUM 8 -#define PyGreenlet_STARTED_NUM 9 -#define PyGreenlet_ACTIVE_NUM 10 -#define PyGreenlet_GET_PARENT_NUM 11 - -#ifndef GREENLET_MODULE -/* This section is used by modules that uses the greenlet C API */ -static void** _PyGreenlet_API = NULL; - -# define PyGreenlet_Type \ - (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) - -# define PyExc_GreenletError \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) - -# define PyExc_GreenletExit \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) - -/* - * PyGreenlet_New(PyObject *args) - * - * greenlet.greenlet(run, parent=None) - */ -# define PyGreenlet_New \ - (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ - _PyGreenlet_API[PyGreenlet_New_NUM]) - -/* - * PyGreenlet_GetCurrent(void) - * - * greenlet.getcurrent() - */ -# define PyGreenlet_GetCurrent \ - (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) - -/* - * PyGreenlet_Throw( - * PyGreenlet *greenlet, - * PyObject *typ, - * PyObject *val, - * PyObject *tb) - * - * g.throw(...) - */ -# define PyGreenlet_Throw \ - (*(PyObject * (*)(PyGreenlet * self, \ - PyObject * typ, \ - PyObject * val, \ - PyObject * tb)) \ - _PyGreenlet_API[PyGreenlet_Throw_NUM]) - -/* - * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) - * - * g.switch(*args, **kwargs) - */ -# define PyGreenlet_Switch \ - (*(PyObject * \ - (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ - _PyGreenlet_API[PyGreenlet_Switch_NUM]) - -/* - * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) - * - * g.parent = new_parent - */ -# define PyGreenlet_SetParent \ - (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ - _PyGreenlet_API[PyGreenlet_SetParent_NUM]) - -/* - * PyGreenlet_GetParent(PyObject* greenlet) - * - * return greenlet.parent; - * - * This could return NULL even if there is no exception active. - * If it does not return NULL, you are responsible for decrementing the - * reference count. 
- */ -# define PyGreenlet_GetParent \ - (*(PyGreenlet* (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) - -/* - * deprecated, undocumented alias. - */ -# define PyGreenlet_GET_PARENT PyGreenlet_GetParent - -# define PyGreenlet_MAIN \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_MAIN_NUM]) - -# define PyGreenlet_STARTED \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_STARTED_NUM]) - -# define PyGreenlet_ACTIVE \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) - - - - -/* Macro that imports greenlet and initializes C API */ -/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we - keep the older definition to be sure older code that might have a copy of - the header still works. */ -# define PyGreenlet_Import() \ - { \ - _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ - } - -#endif /* GREENLET_MODULE */ - -#ifdef __cplusplus -} -#endif -#endif /* !Py_GREENLETOBJECT_H */ diff --git a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/INSTALLER b/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/LICENSE b/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/LICENSE deleted file mode 100644 index 237ce8a..0000000 --- a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/LICENSE +++ /dev/null @@ -1,31 +0,0 @@ -Copyright (c) 2011 by Max Countryman. - -Some rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -* Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above - copyright notice, this list of conditions and the following - disclaimer in the documentation and/or other materials provided - with the distribution. - -* The names of the contributors may not be used to endorse or - promote products derived from this software without specific - prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/METADATA b/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/METADATA deleted file mode 100644 index 7f0b386..0000000 --- a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/METADATA +++ /dev/null @@ -1,74 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-Bcrypt -Version: 1.0.1 -Summary: Brcrypt hashing for Flask. 
-Home-page: https://github.com/maxcountryman/flask-bcrypt -Author: Max Countryman -Author-email: maxc@me.com -License: BSD -Platform: any -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Description-Content-Type: text/markdown -Requires-Dist: Flask -Requires-Dist: bcrypt (>=3.1.1) - -[![Tests](https://img.shields.io/github/workflow/status/maxcountryman/flask-bcrypt/Tests/master?label=tests)](https://github.com/maxcountryman/flask-bcrypt/actions) -[![Version](https://img.shields.io/pypi/v/Flask-Bcrypt.svg)](https://pypi.python.org/pypi/Flask-Bcrypt) -[![Supported Python Versions](https://img.shields.io/pypi/pyversions/Flask-Bcrypt.svg)](https://pypi.python.org/pypi/Flask-Bcrypt) - -# Flask-Bcrypt - -Flask-Bcrypt is a Flask extension that provides bcrypt hashing utilities for -your application. - -Due to the recent increased prevalence of powerful hardware, such as modern -GPUs, hashes have become increasingly easy to crack. A proactive solution to -this is to use a hash that was designed to be "de-optimized". Bcrypt is such -a hashing facility; unlike hashing algorithms such as MD5 and SHA1, which are -optimized for speed, bcrypt is intentionally structured to be slow. - -For sensitive data that must be protected, such as passwords, bcrypt is an -advisable choice. - -## Installation - -Install the extension with one of the following commands: - - $ easy_install flask-bcrypt - -or alternatively if you have pip installed: - - $ pip install flask-bcrypt - -## Usage - -To use the extension simply import the class wrapper and pass the Flask app -object back to here. Do so like this: - - from flask import Flask - from flask_bcrypt import Bcrypt - - app = Flask(__name__) - bcrypt = Bcrypt(app) - -Two primary hashing methods are now exposed by way of the bcrypt object. 
Use -them like so: - - pw_hash = bcrypt.generate_password_hash('hunter2') - bcrypt.check_password_hash(pw_hash, 'hunter2') # returns True - -## Documentation - -The Sphinx-compiled documentation is available here: https://flask-bcrypt.readthedocs.io/ - - diff --git a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/RECORD b/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/RECORD deleted file mode 100644 index e027368..0000000 --- a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/RECORD +++ /dev/null @@ -1,9 +0,0 @@ -Flask_Bcrypt-1.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask_Bcrypt-1.0.1.dist-info/LICENSE,sha256=RFRom0T_iGtIZZvvW5_AD14IAYQvR45h2hYe0Xp0Jak,1456 -Flask_Bcrypt-1.0.1.dist-info/METADATA,sha256=wO9naenfHK7Lgz41drDpI6BlMg_CpzjwGWsAiko2wsk,2615 -Flask_Bcrypt-1.0.1.dist-info/RECORD,, -Flask_Bcrypt-1.0.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask_Bcrypt-1.0.1.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92 -Flask_Bcrypt-1.0.1.dist-info/top_level.txt,sha256=HUgQw7e42Bb9jcMgo5popKpplnimwUQw3cyTi0K1N7o,13 -__pycache__/flask_bcrypt.cpython-312.pyc,, -flask_bcrypt.py,sha256=thnztmOYR9M6afp-bTRdSKsPef6R2cOFo4j_DD88-Ls,8856 diff --git a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/REQUESTED b/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/WHEEL b/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/WHEEL deleted file mode 100644 index becc9a6..0000000 --- a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/top_level.txt b/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/top_level.txt deleted file mode 100644 index f7f0e88..0000000 --- a/myenv/Lib/site-packages/Flask_Bcrypt-1.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_bcrypt diff --git a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/INSTALLER b/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/LICENSE b/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/LICENSE deleted file mode 100644 index 0446381..0000000 --- a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/LICENSE +++ /dev/null @@ -1,22 +0,0 @@ -Copyright (c) 2011 Matthew Frazier - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/METADATA b/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/METADATA deleted file mode 100644 index 445a0b2..0000000 --- a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/METADATA +++ /dev/null @@ -1,183 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-Login -Version: 0.6.3 -Summary: User authentication and session management for Flask. -Home-page: https://github.com/maxcountryman/flask-login -Author: Matthew Frazier -Author-email: leafstormrush@gmail.com -Maintainer: Max Countryman -License: MIT -Project-URL: Documentation, https://flask-login.readthedocs.io/ -Project-URL: Changes, https://github.com/maxcountryman/flask-login/blob/main/CHANGES.md -Project-URL: Source Code, https://github.com/maxcountryman/flask-login -Project-URL: Issue Tracker, https://github.com/maxcountryman/flask-login/issues -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: Flask >=1.0.4 -Requires-Dist: Werkzeug >=1.0.1 - -# Flask-Login - -![Tests](https://github.com/maxcountryman/flask-login/workflows/Tests/badge.svg) -[![coverage](https://coveralls.io/repos/maxcountryman/flask-login/badge.svg?branch=main&service=github)](https://coveralls.io/github/maxcountryman/flask-login?branch=main) -[![Software License](https://img.shields.io/badge/license-MIT-brightgreen.svg)](LICENSE) - -Flask-Login provides user session management for Flask. It handles the common -tasks of logging in, logging out, and remembering your users' sessions over -extended periods of time. - -Flask-Login is not bound to any particular database system or permissions -model. The only requirement is that your user objects implement a few methods, -and that you provide a callback to the extension capable of loading users from -their ID. - -## Installation - -Install the extension with pip: - -```sh -$ pip install flask-login -``` - -## Usage - -Once installed, the Flask-Login is easy to use. Let's walk through setting up -a basic application. Also please note that this is a very basic guide: we will -be taking shortcuts here that you should never take in a real application. - -To begin we'll set up a Flask app: - -```python -import flask - -app = flask.Flask(__name__) -app.secret_key = 'super secret string' # Change this! -``` - -Flask-Login works via a login manager. To kick things off, we'll set up the -login manager by instantiating it and telling it about our Flask app: - -```python -import flask_login - -login_manager = flask_login.LoginManager() - -login_manager.init_app(app) -``` - -To keep things simple we're going to use a dictionary to represent a database -of users. In a real application, this would be an actual persistence layer. 
-However it's important to point out this is a feature of Flask-Login: it -doesn't care how your data is stored so long as you tell it how to retrieve it! - -```python -# Our mock database. -users = {'foo@bar.tld': {'password': 'secret'}} -``` - -We also need to tell Flask-Login how to load a user from a Flask request and -from its session. To do this we need to define our user object, a -`user_loader` callback, and a `request_loader` callback. - -```python -class User(flask_login.UserMixin): - pass - - -@login_manager.user_loader -def user_loader(email): - if email not in users: - return - - user = User() - user.id = email - return user - - -@login_manager.request_loader -def request_loader(request): - email = request.form.get('email') - if email not in users: - return - - user = User() - user.id = email - return user -``` - -Now we're ready to define our views. We can start with a login view, which will -populate the session with authentication bits. After that we can define a view -that requires authentication. - -```python -@app.route('/login', methods=['GET', 'POST']) -def login(): - if flask.request.method == 'GET': - return ''' -
- - - -
- ''' - - email = flask.request.form['email'] - if email in users and flask.request.form['password'] == users[email]['password']: - user = User() - user.id = email - flask_login.login_user(user) - return flask.redirect(flask.url_for('protected')) - - return 'Bad login' - - -@app.route('/protected') -@flask_login.login_required -def protected(): - return 'Logged in as: ' + flask_login.current_user.id -``` - -Finally we can define a view to clear the session and log users out: - -```python -@app.route('/logout') -def logout(): - flask_login.logout_user() - return 'Logged out' -``` - -We now have a basic working application that makes use of session-based -authentication. To round things off, we should provide a callback for login -failures: - -```python -@login_manager.unauthorized_handler -def unauthorized_handler(): - return 'Unauthorized', 401 -``` - -Documentation for Flask-Login is available on [ReadTheDocs](https://flask-login.readthedocs.io/en/latest/). -For complete understanding of available configuration, please refer to the [source code](https://github.com/maxcountryman/flask-login). - - -## Contributing - -We welcome contributions! If you would like to hack on Flask-Login, please -follow these steps: - -1. Fork this repository -2. Make your changes -3. Install the dev requirements with `pip install -r requirements/dev.txt` -4. Submit a pull request after running `tox` (ensure it does not error!) - -Please give us adequate time to review your submission. Thanks! diff --git a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/RECORD b/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/RECORD deleted file mode 100644 index d30aa6a..0000000 --- a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/RECORD +++ /dev/null @@ -1,23 +0,0 @@ -Flask_Login-0.6.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask_Login-0.6.3.dist-info/LICENSE,sha256=ep37nF2iBO0TcPO2LBPimSoS2h2nB_R-FWiX7rQ0Tls,1059 -Flask_Login-0.6.3.dist-info/METADATA,sha256=AUSHR5Po6-Cwmz1KBrAZbTzR-iVVFvtb2NQKYl7UuAU,5799 -Flask_Login-0.6.3.dist-info/RECORD,, -Flask_Login-0.6.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask_Login-0.6.3.dist-info/WHEEL,sha256=Xo9-1PvkuimrydujYJAjF7pCkriuXBpUPEjma1nZyJ0,92 -Flask_Login-0.6.3.dist-info/top_level.txt,sha256=OuXmIpiFnXLvW-iBbW2km7ZIy5EZvwSBnYaOC3Kt7j8,12 -flask_login/__about__.py,sha256=Kkp5e9mV9G7vK_FqZof-g9RFmyyBzq1gge5aKXgvilE,389 -flask_login/__init__.py,sha256=wYQiQCikT_Ndp3PhOD-1gRTGCrUPIE-FrjQUrT9aVAg,2681 -flask_login/__pycache__/__about__.cpython-312.pyc,, -flask_login/__pycache__/__init__.cpython-312.pyc,, -flask_login/__pycache__/config.cpython-312.pyc,, -flask_login/__pycache__/login_manager.cpython-312.pyc,, -flask_login/__pycache__/mixins.cpython-312.pyc,, -flask_login/__pycache__/signals.cpython-312.pyc,, -flask_login/__pycache__/test_client.cpython-312.pyc,, -flask_login/__pycache__/utils.cpython-312.pyc,, -flask_login/config.py,sha256=YAocv18La7YGQyNY5aT7rU1GQIZnX6pvchwqx3kA9p8,1813 -flask_login/login_manager.py,sha256=h20F_iv3mqc6rIJ4-V6_XookzOUl8Rcpasua-dCByQY,20073 -flask_login/mixins.py,sha256=gPd7otMRljxw0eUhUMbHsnEBc_jK2cYdxg5KFLuJcoI,1528 -flask_login/signals.py,sha256=xCMoFHKU1RTVt1NY-Gfl0OiVKpiyNt6YJw_PsgkjY3w,2464 -flask_login/test_client.py,sha256=6mrjiBRLGJpgvvFlLypXPTBLiMp0BAN-Ft-uogqC81g,517 -flask_login/utils.py,sha256=Y1wxjCVxpYohBaQJ0ADLypQ-VvBNycwG-gVXFF7k99I,14021 diff --git a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/REQUESTED 
b/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/WHEEL b/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/WHEEL deleted file mode 100644 index ba48cbc..0000000 --- a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.3) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/top_level.txt b/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/top_level.txt deleted file mode 100644 index 31514bd..0000000 --- a/myenv/Lib/site-packages/Flask_Login-0.6.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_login diff --git a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/INSTALLER b/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/LICENSE b/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/LICENSE deleted file mode 100644 index 2448fd2..0000000 --- a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2013 Miguel Grinberg - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/METADATA b/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/METADATA deleted file mode 100644 index c0c67b9..0000000 --- a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/METADATA +++ /dev/null @@ -1,85 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-Migrate -Version: 4.0.7 -Summary: SQLAlchemy database migrations for Flask applications using Alembic. 
-Author-email: Miguel Grinberg -License: MIT -Project-URL: Homepage, https://github.com/miguelgrinberg/flask-migrate -Project-URL: Bug Tracker, https://github.com/miguelgrinberg/flask-migrate/issues -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python :: 3 -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -License-File: LICENSE -Requires-Dist: Flask >=0.9 -Requires-Dist: Flask-SQLAlchemy >=1.0 -Requires-Dist: alembic >=1.9.0 - -Flask-Migrate -============= - -[![Build status](https://github.com/miguelgrinberg/flask-migrate/workflows/build/badge.svg)](https://github.com/miguelgrinberg/flask-migrate/actions) - -Flask-Migrate is an extension that handles SQLAlchemy database migrations for Flask applications using Alembic. The database operations are provided as command-line arguments under the `flask db` command. - -Installation ------------- - -Install Flask-Migrate with `pip`: - - pip install Flask-Migrate - -Example -------- - -This is an example application that handles database migrations through Flask-Migrate: - -```python -from flask import Flask -from flask_sqlalchemy import SQLAlchemy -from flask_migrate import Migrate - -app = Flask(__name__) -app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db' - -db = SQLAlchemy(app) -migrate = Migrate(app, db) - -class User(db.Model): - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(128)) -``` - -With the above application you can create the database or enable migrations if the database already exists with the following command: - - $ flask db init - -Note that the `FLASK_APP` environment variable must be set according to the Flask documentation for this command to work. This will add a `migrations` folder to your application. The contents of this folder need to be added to version control along with your other source files. - -You can then generate an initial migration: - - $ flask db migrate - -The migration script needs to be reviewed and edited, as Alembic currently does not detect every change you make to your models. In particular, Alembic is currently unable to detect indexes. Once finalized, the migration script also needs to be added to version control. - -Then you can apply the migration to the database: - - $ flask db upgrade - -Then each time the database models change repeat the `migrate` and `upgrade` commands. - -To sync the database in another system just refresh the `migrations` folder from source control and run the `upgrade` command. 
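As an aside on the migrate/upgrade cycle: the kind of model edit that `flask db migrate` detects can be illustrated with a short sketch building on the example application above (the `email` column is invented for illustration and is not part of the original example):

```python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'

db = SQLAlchemy(app)

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(128))
    email = db.Column(db.String(256), unique=True)  # newly added, illustrative column
```

After an edit like this, `flask db migrate` autogenerates a script under `migrations/versions/` and `flask db upgrade` applies it; as noted above, the generated script should be reviewed before it is committed.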
- -To see all the commands that are available run this command: - - $ flask db --help - -Resources ---------- - -- [Documentation](http://flask-migrate.readthedocs.io/en/latest/) -- [pypi](https://pypi.python.org/pypi/Flask-Migrate) -- [Change Log](https://github.com/miguelgrinberg/Flask-Migrate/blob/master/CHANGES.md) diff --git a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/RECORD b/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/RECORD deleted file mode 100644 index a1308c7..0000000 --- a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/RECORD +++ /dev/null @@ -1,31 +0,0 @@ -Flask_Migrate-4.0.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Flask_Migrate-4.0.7.dist-info/LICENSE,sha256=kfkXGlJQvKy3Y__6tAJ8ynIp1HQfeROXhL8jZU1d-DI,1082 -Flask_Migrate-4.0.7.dist-info/METADATA,sha256=3WW5StkAdKx66iP12BXfTzoUsSB4rqEGxVs3qoollRg,3101 -Flask_Migrate-4.0.7.dist-info/RECORD,, -Flask_Migrate-4.0.7.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -Flask_Migrate-4.0.7.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 -Flask_Migrate-4.0.7.dist-info/top_level.txt,sha256=jLoPgiMG6oR4ugNteXn3IHskVVIyIXVStZOVq-AWLdU,14 -flask_migrate/__init__.py,sha256=JMySGA55Y8Gxy3HviWu7qq5rPUNQBWc2NID2OicpDyw,10082 -flask_migrate/__pycache__/__init__.cpython-312.pyc,, -flask_migrate/__pycache__/cli.cpython-312.pyc,, -flask_migrate/cli.py,sha256=v1fOqjpUI8ZniSt0NAxdaU4gFMoZys5yLAofwmBdMHU,10689 -flask_migrate/templates/aioflask-multidb/README,sha256=Ek4cJqTaxneVjtkue--BXMlfpfp3MmJRjqoZvnSizww,43 -flask_migrate/templates/aioflask-multidb/__pycache__/env.cpython-312.pyc,, -flask_migrate/templates/aioflask-multidb/alembic.ini.mako,sha256=SjYEmJKzz6K8QfuZWtLJAJWcCKOdRbfUhsVlpgv8ock,857 -flask_migrate/templates/aioflask-multidb/env.py,sha256=UcjeqkAbyUjTkuQFmCFPG7QOvqhco8-uGp8QEbto0T8,6573 -flask_migrate/templates/aioflask-multidb/script.py.mako,sha256=198VPxVEN3NZ3vHcRuCxSoI4XnOYirGWt01qkbPKoJw,1246 -flask_migrate/templates/aioflask/README,sha256=KKqWGl4YC2RqdOdq-y6quTDW0b7D_UZNHuM8glM1L-c,44 -flask_migrate/templates/aioflask/__pycache__/env.cpython-312.pyc,, -flask_migrate/templates/aioflask/alembic.ini.mako,sha256=SjYEmJKzz6K8QfuZWtLJAJWcCKOdRbfUhsVlpgv8ock,857 -flask_migrate/templates/aioflask/env.py,sha256=m6ZtBhdpwuq89vVeLTWmNT-1NfJZqarC_hsquCdR9bw,3478 -flask_migrate/templates/aioflask/script.py.mako,sha256=8_xgA-gm_OhehnO7CiIijWgnm00ZlszEHtIHrAYFJl0,494 -flask_migrate/templates/flask-multidb/README,sha256=AfiP5foaV2odZxXxuUuSIS6YhkIpR7CsOo2mpuxwHdc,40 -flask_migrate/templates/flask-multidb/__pycache__/env.cpython-312.pyc,, -flask_migrate/templates/flask-multidb/alembic.ini.mako,sha256=SjYEmJKzz6K8QfuZWtLJAJWcCKOdRbfUhsVlpgv8ock,857 -flask_migrate/templates/flask-multidb/env.py,sha256=F44iqsAxLTVBN_zD8CMUkdE7Aub4niHMmo5wl9mY4Uw,6190 -flask_migrate/templates/flask-multidb/script.py.mako,sha256=198VPxVEN3NZ3vHcRuCxSoI4XnOYirGWt01qkbPKoJw,1246 -flask_migrate/templates/flask/README,sha256=JL0NrjOrscPcKgRmQh1R3hlv1_rohDot0TvpmdM27Jk,41 -flask_migrate/templates/flask/__pycache__/env.cpython-312.pyc,, -flask_migrate/templates/flask/alembic.ini.mako,sha256=SjYEmJKzz6K8QfuZWtLJAJWcCKOdRbfUhsVlpgv8ock,857 -flask_migrate/templates/flask/env.py,sha256=ibK1hsdOsOBzXNU2yQoAIza7f_EFzaVSWwON_NSpNzQ,3344 -flask_migrate/templates/flask/script.py.mako,sha256=8_xgA-gm_OhehnO7CiIijWgnm00ZlszEHtIHrAYFJl0,494 diff --git a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/REQUESTED b/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/REQUESTED 
deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/WHEEL b/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/WHEEL deleted file mode 100644 index 98c0d20..0000000 --- a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.42.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/top_level.txt b/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/top_level.txt deleted file mode 100644 index 0652762..0000000 --- a/myenv/Lib/site-packages/Flask_Migrate-4.0.7.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -flask_migrate diff --git a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/INSTALLER b/myenv/Lib/site-packages/Mako-1.3.5.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/LICENSE b/myenv/Lib/site-packages/Mako-1.3.5.dist-info/LICENSE deleted file mode 100644 index 7cf3d43..0000000 --- a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright 2006-2024 the Mako authors and contributors . - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/METADATA b/myenv/Lib/site-packages/Mako-1.3.5.dist-info/METADATA deleted file mode 100644 index dd93876..0000000 --- a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/METADATA +++ /dev/null @@ -1,87 +0,0 @@ -Metadata-Version: 2.1 -Name: Mako -Version: 1.3.5 -Summary: A super-fast templating language that borrows the best ideas from the existing templating languages. 
-Home-page: https://www.makotemplates.org/ -Author: Mike Bayer -Author-email: mike@zzzcomputing.com -License: MIT -Project-URL: Documentation, https://docs.makotemplates.org -Project-URL: Issue Tracker, https://github.com/sqlalchemy/mako -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: MarkupSafe >=0.9.2 -Provides-Extra: babel -Requires-Dist: Babel ; extra == 'babel' -Provides-Extra: lingua -Requires-Dist: lingua ; extra == 'lingua' -Provides-Extra: testing -Requires-Dist: pytest ; extra == 'testing' - -========================= -Mako Templates for Python -========================= - -Mako is a template library written in Python. It provides a familiar, non-XML -syntax which compiles into Python modules for maximum performance. Mako's -syntax and API borrows from the best ideas of many others, including Django -templates, Cheetah, Myghty, and Genshi. Conceptually, Mako is an embedded -Python (i.e. Python Server Page) language, which refines the familiar ideas -of componentized layout and inheritance to produce one of the most -straightforward and flexible models available, while also maintaining close -ties to Python calling and scoping semantics. - -Nutshell -======== - -:: - - <%inherit file="base.html"/> - <% - rows = [[v for v in range(0,10)] for row in range(0,10)] - %> - - % for row in rows: - ${makerow(row)} - % endfor -
- - <%def name="makerow(row)"> - - % for name in row: - ${name}\ - % endfor - - - -Philosophy -=========== - -Python is a great scripting language. Don't reinvent the wheel...your templates can handle it ! - -Documentation -============== - -See documentation for Mako at https://docs.makotemplates.org/en/latest/ - -License -======== - -Mako is licensed under an MIT-style license (see LICENSE). -Other incorporated projects may be licensed under different licenses. -All licenses allow for non-commercial and commercial use. diff --git a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/RECORD b/myenv/Lib/site-packages/Mako-1.3.5.dist-info/RECORD deleted file mode 100644 index a28c36f..0000000 --- a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/RECORD +++ /dev/null @@ -1,74 +0,0 @@ -../../Scripts/mako-render.exe,sha256=roV_qNW2byPIh1lFHC8hed0ZJEkqJ0pWZfzSHEaZLIk,108444 -Mako-1.3.5.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -Mako-1.3.5.dist-info/LICENSE,sha256=FWJ7NrONBynN1obfmr9gZQPZnWJLL17FyyVKddWvqJE,1098 -Mako-1.3.5.dist-info/METADATA,sha256=X8_j4ZLP792pwvQOY-Z3jsW-iRUrDGZn5miShY5GHZ4,2900 -Mako-1.3.5.dist-info/RECORD,, -Mako-1.3.5.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92 -Mako-1.3.5.dist-info/entry_points.txt,sha256=LsKkUsOsJQYbJ2M72hZCm968wi5K8Ywb5uFxCuN8Obk,512 -Mako-1.3.5.dist-info/top_level.txt,sha256=LItdH8cDPetpUu8rUyBG3DObS6h9Gcpr9j_WLj2S-R0,5 -mako/__init__.py,sha256=zzIKJsH_9dLwvqhk4aRiU6XKsK0Hhw37Gh3y1__ejQk,242 -mako/__pycache__/__init__.cpython-312.pyc,, -mako/__pycache__/_ast_util.cpython-312.pyc,, -mako/__pycache__/ast.cpython-312.pyc,, -mako/__pycache__/cache.cpython-312.pyc,, -mako/__pycache__/cmd.cpython-312.pyc,, -mako/__pycache__/codegen.cpython-312.pyc,, -mako/__pycache__/compat.cpython-312.pyc,, -mako/__pycache__/exceptions.cpython-312.pyc,, -mako/__pycache__/filters.cpython-312.pyc,, -mako/__pycache__/lexer.cpython-312.pyc,, -mako/__pycache__/lookup.cpython-312.pyc,, -mako/__pycache__/parsetree.cpython-312.pyc,, -mako/__pycache__/pygen.cpython-312.pyc,, -mako/__pycache__/pyparser.cpython-312.pyc,, -mako/__pycache__/runtime.cpython-312.pyc,, -mako/__pycache__/template.cpython-312.pyc,, -mako/__pycache__/util.cpython-312.pyc,, -mako/_ast_util.py,sha256=CenxCrdES1irHDhOQU6Ldta4rdsytfYaMkN6s0TlveM,20247 -mako/ast.py,sha256=pY7MH-5cLnUuVz5YAwoGhWgWfgoVvLQkRDtc_s9qqw0,6642 -mako/cache.py,sha256=5DBBorj1NqiWDqNhN3ZJ8tMCm-h6Mew541276kdsxAU,7680 -mako/cmd.py,sha256=vP5M5g9yc5sjAT5owVTQu056YwyS-YkpulFSDb0IMGw,2813 -mako/codegen.py,sha256=XRhzcuGEleDUXTfmOjw4alb6TkczbmEfBCLqID8x4bA,47736 -mako/compat.py,sha256=wjVMf7uMg0TlC_aI5hdwWizza99nqJuGNdrnTNrZbt0,1820 -mako/exceptions.py,sha256=pfdd5-1lCZ--I2YqQ_oHODZLmo62bn_lO5Kz_1__72w,12530 -mako/ext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -mako/ext/__pycache__/__init__.cpython-312.pyc,, -mako/ext/__pycache__/autohandler.cpython-312.pyc,, -mako/ext/__pycache__/babelplugin.cpython-312.pyc,, -mako/ext/__pycache__/beaker_cache.cpython-312.pyc,, -mako/ext/__pycache__/extract.cpython-312.pyc,, -mako/ext/__pycache__/linguaplugin.cpython-312.pyc,, -mako/ext/__pycache__/preprocessors.cpython-312.pyc,, -mako/ext/__pycache__/pygmentplugin.cpython-312.pyc,, -mako/ext/__pycache__/turbogears.cpython-312.pyc,, -mako/ext/autohandler.py,sha256=Tyz1CLRlG_C0EnKgjuHzqS4BBnpeA49O05x8rriNtTY,1885 -mako/ext/babelplugin.py,sha256=v10o5XQdgXwbr1bo0aL8VV3THm_84C_eeq6BmAGd3uA,2091 -mako/ext/beaker_cache.py,sha256=aAs9ELzO2WaiO5OppV4KDT6f7yNyn1BF1XHDQZwf0-E,2578 
-mako/ext/extract.py,sha256=c3YuIN3Z5ZgS-xzX_gjKrEVQaptK3liXkm5j-Vq8yEM,4659 -mako/ext/linguaplugin.py,sha256=pzHlHC3-KlFeVAR4r8S1--_dfE5DcYmjLXtr0genBYU,1935 -mako/ext/preprocessors.py,sha256=zKQy42Ce6dOmU0Yk_rUVDAAn38-RUUfQolVKTJjLotA,576 -mako/ext/pygmentplugin.py,sha256=qBdsAhKktlQX7d5Yv1sAXufUNOZqcnJmKuC7V4D_srM,4753 -mako/ext/turbogears.py,sha256=0emY1WiMnuY8Pf6ARv5JBArKtouUdmuTljI-w6rE3J4,2141 -mako/filters.py,sha256=F7aDIKTUxnT-Og4rgboQtnML7Q87DJTHQyhi_dY_Ih4,4658 -mako/lexer.py,sha256=dtCZU1eoF3ymEdiCwCzEIw5SH0lgJkDsHJy9VHI_2XY,16324 -mako/lookup.py,sha256=rkMvT5T7EOS5KRvPtgYii-sjh1nWWyKok_mEk-cEzrM,12428 -mako/parsetree.py,sha256=7RNVRTsKcsMt8vU4NQi5C7e4vhdUyA9tqyd1yIkvAAQ,19007 -mako/pygen.py,sha256=d4f_ugRACCXuV9hJgEk6Ncoj38EaRHA3RTxkr_tK7UQ,10416 -mako/pyparser.py,sha256=eY_a94QDXaK3vIA2jZYT9so7oXKKJLT0SO_Yrl3IOb8,7478 -mako/runtime.py,sha256=ZsUEN22nX3d3dECQujF69mBKDQS6yVv2nvz_0eTvFGg,27804 -mako/template.py,sha256=4xQzwruZd5XzPw7iONZMZJj4SdFsctYYg4PfBYs2PLk,23857 -mako/testing/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -mako/testing/__pycache__/__init__.cpython-312.pyc,, -mako/testing/__pycache__/_config.cpython-312.pyc,, -mako/testing/__pycache__/assertions.cpython-312.pyc,, -mako/testing/__pycache__/config.cpython-312.pyc,, -mako/testing/__pycache__/exclusions.cpython-312.pyc,, -mako/testing/__pycache__/fixtures.cpython-312.pyc,, -mako/testing/__pycache__/helpers.cpython-312.pyc,, -mako/testing/_config.py,sha256=k-qpnsnbXUoN-ykMN5BRpg84i1x0p6UsAddKQnrIytU,3566 -mako/testing/assertions.py,sha256=pfbGl84QlW7QWGg3_lo3wP8XnBAVo9AjzNp2ajmn7FA,5161 -mako/testing/config.py,sha256=wmYVZfzGvOK3mJUZpzmgO8-iIgvaCH41Woi4yDpxq6E,323 -mako/testing/exclusions.py,sha256=_t6ADKdatk3f18tOfHV_ZY6u_ZwQsKphZ2MXJVSAOcI,1553 -mako/testing/fixtures.py,sha256=nEp7wTusf7E0n3Q-BHJW2s_t1vx0KB9poadQ1BmIJzE,3044 -mako/testing/helpers.py,sha256=z4HAactwlht4ut1cbvxKt1QLb3yLPk1U7cnh5BwVUlc,1623 -mako/util.py,sha256=dIFuchHfiNtRJJ99kEIRdHBkCZ3UmEvNO6l2ZQSCdVU,10638 diff --git a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/WHEEL b/myenv/Lib/site-packages/Mako-1.3.5.dist-info/WHEEL deleted file mode 100644 index bab98d6..0000000 --- a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.43.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/entry_points.txt b/myenv/Lib/site-packages/Mako-1.3.5.dist-info/entry_points.txt deleted file mode 100644 index 30f31b2..0000000 --- a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/entry_points.txt +++ /dev/null @@ -1,18 +0,0 @@ -[babel.extractors] -mako = mako.ext.babelplugin:extract [babel] - -[console_scripts] -mako-render = mako.cmd:cmdline - -[lingua.extractors] -mako = mako.ext.linguaplugin:LinguaMakoExtractor [lingua] - -[pygments.lexers] -css+mako = mako.ext.pygmentplugin:MakoCssLexer -html+mako = mako.ext.pygmentplugin:MakoHtmlLexer -js+mako = mako.ext.pygmentplugin:MakoJavascriptLexer -mako = mako.ext.pygmentplugin:MakoLexer -xml+mako = mako.ext.pygmentplugin:MakoXmlLexer - -[python.templating.engines] -mako = mako.ext.turbogears:TGPlugin diff --git a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/top_level.txt b/myenv/Lib/site-packages/Mako-1.3.5.dist-info/top_level.txt deleted file mode 100644 index 2951cdd..0000000 --- a/myenv/Lib/site-packages/Mako-1.3.5.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -mako diff --git a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/INSTALLER 
b/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/LICENSE.txt b/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/LICENSE.txt deleted file mode 100644 index 9d227a0..0000000 --- a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/METADATA b/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/METADATA deleted file mode 100644 index cbf0cfe..0000000 --- a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/METADATA +++ /dev/null @@ -1,92 +0,0 @@ -Metadata-Version: 2.1 -Name: MarkupSafe -Version: 3.0.1 -Summary: Safely add untrusted strings to HTML/XML markup. -Maintainer-email: Pallets -License: Copyright 2010 Pallets - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - - 3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT - HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED - TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://markupsafe.palletsprojects.com/ -Project-URL: Changes, https://markupsafe.palletsprojects.com/changes/ -Project-URL: Source, https://github.com/pallets/markupsafe/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Typing :: Typed -Requires-Python: >=3.9 -Description-Content-Type: text/markdown -License-File: LICENSE.txt - -# MarkupSafe - -MarkupSafe implements a text object that escapes characters so it is -safe to use in HTML and XML. Characters that have special meanings are -replaced so that they display as the actual characters. This mitigates -injection attacks, meaning untrusted user input can safely be displayed -on a page. - - -## Examples - -```pycon ->>> from markupsafe import Markup, escape - ->>> # escape replaces special characters and wraps in Markup ->>> escape("") -Markup('<script>alert(document.cookie);</script>') - ->>> # wrap in Markup to mark text "safe" and prevent escaping ->>> Markup("Hello") -Markup('hello') - ->>> escape(Markup("Hello")) -Markup('hello') - ->>> # Markup is a str subclass ->>> # methods and operators escape their arguments ->>> template = Markup("Hello {name}") ->>> template.format(name='"World"') -Markup('Hello "World"') -``` - -## Donate - -The Pallets organization develops and supports MarkupSafe and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -[please donate today][]. 
- -[please donate today]: https://palletsprojects.com/donate diff --git a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/RECORD b/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/RECORD deleted file mode 100644 index e8952eb..0000000 --- a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -MarkupSafe-3.0.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -MarkupSafe-3.0.1.dist-info/LICENSE.txt,sha256=RjHsDbX9kKVH4zaBcmTGeYIUM4FG-KyUtKV_lu6MnsQ,1503 -MarkupSafe-3.0.1.dist-info/METADATA,sha256=pi1Lhtv-cKr98ycN2-x5_52R9u1KnK0tHbbUXofV68s,4067 -MarkupSafe-3.0.1.dist-info/RECORD,, -MarkupSafe-3.0.1.dist-info/WHEEL,sha256=3vidnDuZ-QSnHIxLhNbI1gIM-KgyEcMHuZuv1mWPd_Q,101 -MarkupSafe-3.0.1.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11 -markupsafe/__init__.py,sha256=pREerPwvinB62tNCMOwqxBS2YHV6R52Wcq1d-rB4Z5o,13609 -markupsafe/__pycache__/__init__.cpython-312.pyc,, -markupsafe/__pycache__/_native.cpython-312.pyc,, -markupsafe/_native.py,sha256=2ptkJ40yCcp9kq3L1NqpgjfpZB-obniYKFFKUOkHh4Q,218 -markupsafe/_speedups.c,sha256=fPfDHioeam7ZsWxJdSbKK2i8YM-3sSTe6W66IBsf_Gw,4358 -markupsafe/_speedups.cp312-win_amd64.pyd,sha256=HY1cH11Rowf472u4iKXH1xk8YH5cRcCewPQYeJu3-LA,13312 -markupsafe/_speedups.pyi,sha256=LSDmXYOefH4HVpAXuL8sl7AttLw0oXh1njVoVZp2wqQ,42 -markupsafe/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/WHEEL b/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/WHEEL deleted file mode 100644 index e4b52df..0000000 --- a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.1.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/top_level.txt b/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/top_level.txt deleted file mode 100644 index 75bf729..0000000 --- a/myenv/Lib/site-packages/MarkupSafe-3.0.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -markupsafe diff --git a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/INSTALLER b/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/LICENSE b/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/LICENSE deleted file mode 100644 index 967cdc5..0000000 --- a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright 2005-2024 SQLAlchemy authors and contributors . - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/METADATA b/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/METADATA deleted file mode 100644 index 20b2b3b..0000000 --- a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/METADATA +++ /dev/null @@ -1,242 +0,0 @@ -Metadata-Version: 2.1 -Name: SQLAlchemy -Version: 2.0.35 -Summary: Database Abstraction Library -Home-page: https://www.sqlalchemy.org -Author: Mike Bayer -Author-email: mike_mp@zzzcomputing.com -License: MIT -Project-URL: Documentation, https://docs.sqlalchemy.org -Project-URL: Issue Tracker, https://github.com/sqlalchemy/sqlalchemy/ -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Database :: Front-Ends -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: typing-extensions >=4.6.0 -Requires-Dist: greenlet !=0.4.17 ; python_version < "3.13" and (platform_machine == "aarch64" or (platform_machine == "ppc64le" or (platform_machine == "x86_64" or (platform_machine == "amd64" or (platform_machine == "AMD64" or (platform_machine == "win32" or platform_machine == "WIN32")))))) -Requires-Dist: importlib-metadata ; python_version < "3.8" -Provides-Extra: aiomysql -Requires-Dist: greenlet !=0.4.17 ; extra == 'aiomysql' -Requires-Dist: aiomysql >=0.2.0 ; extra == 'aiomysql' -Provides-Extra: aioodbc -Requires-Dist: greenlet !=0.4.17 ; extra == 'aioodbc' -Requires-Dist: aioodbc ; extra == 'aioodbc' -Provides-Extra: aiosqlite -Requires-Dist: greenlet !=0.4.17 ; extra == 'aiosqlite' -Requires-Dist: aiosqlite ; extra == 'aiosqlite' -Requires-Dist: typing-extensions !=3.10.0.1 ; extra == 'aiosqlite' -Provides-Extra: asyncio -Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncio' -Provides-Extra: asyncmy -Requires-Dist: greenlet !=0.4.17 ; extra == 'asyncmy' -Requires-Dist: asyncmy !=0.2.4,!=0.2.6,>=0.2.3 ; extra == 'asyncmy' -Provides-Extra: mariadb_connector -Requires-Dist: mariadb !=1.1.2,!=1.1.5,>=1.0.1 ; extra == 'mariadb_connector' -Provides-Extra: mssql -Requires-Dist: pyodbc ; extra == 'mssql' -Provides-Extra: mssql_pymssql -Requires-Dist: pymssql ; extra == 'mssql_pymssql' -Provides-Extra: mssql_pyodbc -Requires-Dist: pyodbc ; extra == 'mssql_pyodbc' -Provides-Extra: mypy -Requires-Dist: mypy >=0.910 ; extra == 'mypy' -Provides-Extra: mysql -Requires-Dist: mysqlclient >=1.4.0 ; extra == 'mysql' -Provides-Extra: mysql_connector -Requires-Dist: mysql-connector-python ; extra == 'mysql_connector' -Provides-Extra: oracle -Requires-Dist: cx-oracle >=8 ; extra == 'oracle' -Provides-Extra: oracle_oracledb 
-Requires-Dist: oracledb >=1.0.1 ; extra == 'oracle_oracledb' -Provides-Extra: postgresql -Requires-Dist: psycopg2 >=2.7 ; extra == 'postgresql' -Provides-Extra: postgresql_asyncpg -Requires-Dist: greenlet !=0.4.17 ; extra == 'postgresql_asyncpg' -Requires-Dist: asyncpg ; extra == 'postgresql_asyncpg' -Provides-Extra: postgresql_pg8000 -Requires-Dist: pg8000 >=1.29.1 ; extra == 'postgresql_pg8000' -Provides-Extra: postgresql_psycopg -Requires-Dist: psycopg >=3.0.7 ; extra == 'postgresql_psycopg' -Provides-Extra: postgresql_psycopg2binary -Requires-Dist: psycopg2-binary ; extra == 'postgresql_psycopg2binary' -Provides-Extra: postgresql_psycopg2cffi -Requires-Dist: psycopg2cffi ; extra == 'postgresql_psycopg2cffi' -Provides-Extra: postgresql_psycopgbinary -Requires-Dist: psycopg[binary] >=3.0.7 ; extra == 'postgresql_psycopgbinary' -Provides-Extra: pymysql -Requires-Dist: pymysql ; extra == 'pymysql' -Provides-Extra: sqlcipher -Requires-Dist: sqlcipher3-binary ; extra == 'sqlcipher' - -SQLAlchemy -========== - -|PyPI| |Python| |Downloads| - -.. |PyPI| image:: https://img.shields.io/pypi/v/sqlalchemy - :target: https://pypi.org/project/sqlalchemy - :alt: PyPI - -.. |Python| image:: https://img.shields.io/pypi/pyversions/sqlalchemy - :target: https://pypi.org/project/sqlalchemy - :alt: PyPI - Python Version - -.. |Downloads| image:: https://static.pepy.tech/badge/sqlalchemy/month - :target: https://pepy.tech/project/sqlalchemy - :alt: PyPI - Downloads - - -The Python SQL Toolkit and Object Relational Mapper - -Introduction -------------- - -SQLAlchemy is the Python SQL toolkit and Object Relational Mapper -that gives application developers the full power and -flexibility of SQL. SQLAlchemy provides a full suite -of well known enterprise-level persistence patterns, -designed for efficient and high-performing database -access, adapted into a simple and Pythonic domain -language. - -Major SQLAlchemy features include: - -* An industrial strength ORM, built - from the core on the identity map, unit of work, - and data mapper patterns. These patterns - allow transparent persistence of objects - using a declarative configuration system. - Domain models - can be constructed and manipulated naturally, - and changes are synchronized with the - current transaction automatically. -* A relationally-oriented query system, exposing - the full range of SQL's capabilities - explicitly, including joins, subqueries, - correlation, and most everything else, - in terms of the object model. - Writing queries with the ORM uses the same - techniques of relational composition you use - when writing SQL. While you can drop into - literal SQL at any time, it's virtually never - needed. -* A comprehensive and flexible system - of eager loading for related collections and objects. - Collections are cached within a session, - and can be loaded on individual access, all - at once using joins, or by query per collection - across the full result set. -* A Core SQL construction system and DBAPI - interaction layer. The SQLAlchemy Core is - separate from the ORM and is a full database - abstraction layer in its own right, and includes - an extensible Python-based SQL expression - language, schema metadata, connection pooling, - type coercion, and custom types. -* All primary and foreign key constraints are - assumed to be composite and natural. Surrogate - integer primary keys are of course still the - norm, but SQLAlchemy never assumes or hardcodes - to this model. -* Database introspection and generation. 
Database - schemas can be "reflected" in one step into - Python structures representing database metadata; - those same structures can then generate - CREATE statements right back out - all within - the Core, independent of the ORM. - -SQLAlchemy's philosophy: - -* SQL databases behave less and less like object - collections the more size and performance start to - matter; object collections behave less and less like - tables and rows the more abstraction starts to matter. - SQLAlchemy aims to accommodate both of these - principles. -* An ORM doesn't need to hide the "R". A relational - database provides rich, set-based functionality - that should be fully exposed. SQLAlchemy's - ORM provides an open-ended set of patterns - that allow a developer to construct a custom - mediation layer between a domain model and - a relational schema, turning the so-called - "object relational impedance" issue into - a distant memory. -* The developer, in all cases, makes all decisions - regarding the design, structure, and naming conventions - of both the object model as well as the relational - schema. SQLAlchemy only provides the means - to automate the execution of these decisions. -* With SQLAlchemy, there's no such thing as - "the ORM generated a bad query" - you - retain full control over the structure of - queries, including how joins are organized, - how subqueries and correlation are used, what - columns are requested. Everything SQLAlchemy - does is ultimately the result of a developer-initiated - decision. -* Don't use an ORM if the problem doesn't need one. - SQLAlchemy consists of a Core and separate ORM - component. The Core offers a full SQL expression - language that allows Pythonic construction - of SQL constructs that render directly to SQL - strings for a target database, returning - result sets that are essentially enhanced DBAPI - cursors. -* Transactions should be the norm. With SQLAlchemy's - ORM, nothing goes to permanent storage until - commit() is called. SQLAlchemy encourages applications - to create a consistent means of delineating - the start and end of a series of operations. -* Never render a literal value in a SQL statement. - Bound parameters are used to the greatest degree - possible, allowing query optimizers to cache - query plans effectively and making SQL injection - attacks a non-issue. - -Documentation ------------- - -Latest documentation is at: - -https://www.sqlalchemy.org/docs/ - -Installation / Requirements ---------------------------- - -Full documentation for installation is at -`Installation `_. - -Getting Help / Development / Bug reporting ------------------------------------------- - -Please refer to the `SQLAlchemy Community Guide `_. - -Code of Conduct ---------------- - -Above all, SQLAlchemy places great emphasis on polite, thoughtful, and -constructive communication between users and developers. -Please see our current Code of Conduct at -`Code of Conduct `_. - -License ------- - -SQLAlchemy is distributed under the `MIT license -`_.
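The Core/ORM split and the bound-parameter claim in the deleted METADATA above are easiest to see in a few lines of code. This is a minimal sketch, not taken from this repository: the in-memory SQLite URL and the ``users`` table are invented for illustration, and only standard SQLAlchemy 2.0 Core calls are used::

    import sqlalchemy as sa

    engine = sa.create_engine("sqlite:///:memory:")  # throwaway database
    metadata = sa.MetaData()
    users = sa.Table(
        "users",
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("name", sa.String(50)),
    )
    metadata.create_all(engine)  # Core emits CREATE TABLE; no ORM involved

    with engine.begin() as conn:  # transactional block; commits on success
        conn.execute(sa.insert(users), [{"name": "alice"}, {"name": "bob"}])

    with engine.connect() as conn:
        # "alice" travels as a bound parameter, never as a literal in the SQL
        stmt = sa.select(users.c.name).where(users.c.name == "alice")
        print(conn.execute(stmt).all())

The ``Provides-Extra`` entries above correspond to pip extras; installing, say, ``pip install "sqlalchemy[asyncio]"`` pulls in the ``greenlet`` dependency listed in the deleted metadata.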
- diff --git a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/RECORD b/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/RECORD deleted file mode 100644 index 8a82d99..0000000 --- a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/RECORD +++ /dev/null @@ -1,529 +0,0 @@ -SQLAlchemy-2.0.35.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -SQLAlchemy-2.0.35.dist-info/LICENSE,sha256=eYQKk6tEYK_iQW6ePf95YIdsg66dK-JwXoOhBNSXQOs,1119 -SQLAlchemy-2.0.35.dist-info/METADATA,sha256=X_nxEyH7UmQgJFAM-HC7_kyrWrIwhCVHMeUJF7WT-40,9874 -SQLAlchemy-2.0.35.dist-info/RECORD,, -SQLAlchemy-2.0.35.dist-info/WHEEL,sha256=3vidnDuZ-QSnHIxLhNbI1gIM-KgyEcMHuZuv1mWPd_Q,101 -SQLAlchemy-2.0.35.dist-info/top_level.txt,sha256=rp-ZgB7D8G11ivXON5VGPjupT1voYmWqkciDt5Uaw_Q,11 -sqlalchemy/__init__.py,sha256=Eti3aIAtGo87_lG9ANCWEOn39h0dnAyzp0LCDuIS9rA,13327 -sqlalchemy/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/__pycache__/events.cpython-312.pyc,, -sqlalchemy/__pycache__/exc.cpython-312.pyc,, -sqlalchemy/__pycache__/inspection.cpython-312.pyc,, -sqlalchemy/__pycache__/log.cpython-312.pyc,, -sqlalchemy/__pycache__/schema.cpython-312.pyc,, -sqlalchemy/__pycache__/types.cpython-312.pyc,, -sqlalchemy/connectors/__init__.py,sha256=A2AI8p63aT0jT5CsVX33xlTfiGWliOcGahlK0RyTLXg,494 -sqlalchemy/connectors/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/connectors/__pycache__/aioodbc.cpython-312.pyc,, -sqlalchemy/connectors/__pycache__/asyncio.cpython-312.pyc,, -sqlalchemy/connectors/__pycache__/pyodbc.cpython-312.pyc,, -sqlalchemy/connectors/aioodbc.py,sha256=fg3xfG-5gLsy-DSyVonNNKYhOf0_lzHmixRFa5edtWI,5462 -sqlalchemy/connectors/asyncio.py,sha256=EWjnlej7KeY7kDep36_ayJKl06_YnnOPWtUQKBZhgsY,6351 -sqlalchemy/connectors/pyodbc.py,sha256=IG5lLCyFbnv1wB85HQuMO3S5piWHaB660OBWvBIQhbg,8750 -sqlalchemy/cyextension/__init__.py,sha256=Hlfk91RinbOuNF_fybR5R2UtiIcTeUOXS66QOfSSCV0,250 -sqlalchemy/cyextension/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/cyextension/collections.cp312-win_amd64.pyd,sha256=8jTEI_bnx2p1dmrSoZw9CuqMKCjBSMwwG8T_FrtM5pg,176640 -sqlalchemy/cyextension/collections.pyx,sha256=GXPkr9cHRLW3Vcu-ik3dVBZMR-zf0Q5_K4J-_8yV-gk,12980 -sqlalchemy/cyextension/immutabledict.cp312-win_amd64.pyd,sha256=3OL5tmT7EgPouJCTrAWeWU6oMlj6gu3VXHtgK0FZc_g,72704 -sqlalchemy/cyextension/immutabledict.pxd,sha256=5iGndSbJCgCkNmRbJ_z14RANs2dSSnAzyiRPUTBk58Y,299 -sqlalchemy/cyextension/immutabledict.pyx,sha256=IhB2pR49CrORXQ3LXMFpuCIRc6I08QNvIylE1cPQA5o,3668 -sqlalchemy/cyextension/processors.cp312-win_amd64.pyd,sha256=lAg4_McnZus48hUlqJOqi_0B7oriGUVRrlH0Ndhe5xU,59392 -sqlalchemy/cyextension/processors.pyx,sha256=V9gzqXiNHWsa5DBgYl-3KzclFHY8kXGF_TD1xHFE7eM,1860 -sqlalchemy/cyextension/resultproxy.cp312-win_amd64.pyd,sha256=kJ6zg8Mwhbk4S6Vy7i_0Lbr3IdMwnXIFQsW4iXZxHZg,61952 -sqlalchemy/cyextension/resultproxy.pyx,sha256=h_RrKasbLtKK3LqUh6UiWtkumBlKtcN5eeB_1bZROMA,2827 -sqlalchemy/cyextension/util.cp312-win_amd64.pyd,sha256=xtp1INkRNjQO8zsqindstAYfWjlj1C8z-zGKRrBqe7I,73728 -sqlalchemy/cyextension/util.pyx,sha256=50QYpSAKgLSUfhFEQgSN2e1qHWCMh_b6ZNlErDUS7ec,2621 -sqlalchemy/dialects/__init__.py,sha256=SJfQyxMhOL58EB-S6GQv_0jf2oP7MMfmVdlV2UxGWQo,1831 -sqlalchemy/dialects/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/dialects/__pycache__/_typing.cpython-312.pyc,, -sqlalchemy/dialects/_typing.py,sha256=mN2r8mU8z-mRh4YS3VeK8Nv_IKJmE0Mb1CrJ-ptILas,913 -sqlalchemy/dialects/mssql/__init__.py,sha256=r3oTfX2LLbJAGhM57wdPLWxaZBzunkcmyaTbW0FjLuY,1968 -sqlalchemy/dialects/mssql/__pycache__/__init__.cpython-312.pyc,, 
-sqlalchemy/dialects/mssql/__pycache__/aioodbc.cpython-312.pyc,, -sqlalchemy/dialects/mssql/__pycache__/base.cpython-312.pyc,, -sqlalchemy/dialects/mssql/__pycache__/information_schema.cpython-312.pyc,, -sqlalchemy/dialects/mssql/__pycache__/json.cpython-312.pyc,, -sqlalchemy/dialects/mssql/__pycache__/provision.cpython-312.pyc,, -sqlalchemy/dialects/mssql/__pycache__/pymssql.cpython-312.pyc,, -sqlalchemy/dialects/mssql/__pycache__/pyodbc.cpython-312.pyc,, -sqlalchemy/dialects/mssql/aioodbc.py,sha256=b9bhUKcVj4NzoqJIDfECeE_Rmt51sRy8OOUFz_R3vpg,2086 -sqlalchemy/dialects/mssql/base.py,sha256=JIqq10N0cb_sOM94JCpWnkqB9tOOj2eSBjer4CNlhBw,136458 -sqlalchemy/dialects/mssql/information_schema.py,sha256=A1UJAoFb3UtE8YCY3heBgeTMkzWq3j7C2caZ3gcMGZk,8338 -sqlalchemy/dialects/mssql/json.py,sha256=nZVVsgmR4Z4dNn9cv5Gucq596gsQ0MvASPuEEtz-Gek,4949 -sqlalchemy/dialects/mssql/provision.py,sha256=jpyErAQ_zgmQKiE0G8dQcNn6O8ssQELrO2BzcukLUa0,5755 -sqlalchemy/dialects/mssql/pymssql.py,sha256=RrxWm94UgCipZCM8FbeeENFndJh6JDaSkWPF3feK_VI,4223 -sqlalchemy/dialects/mssql/pyodbc.py,sha256=YVI19AnrqxPCBwDqcjrO_rqUUWbV2re7E8iLuV1ilqE,27801 -sqlalchemy/dialects/mysql/__init__.py,sha256=PPQDwNqcpxWMt3nFQ66KefX9T9iz7d8lybEwKlfXB1U,2254 -sqlalchemy/dialects/mysql/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/aiomysql.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/asyncmy.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/base.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/cymysql.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/dml.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/enumerated.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/expression.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/json.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mariadb.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mariadbconnector.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mysqlconnector.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/mysqldb.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/provision.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/pymysql.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/pyodbc.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/reflection.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/reserved_words.cpython-312.pyc,, -sqlalchemy/dialects/mysql/__pycache__/types.cpython-312.pyc,, -sqlalchemy/dialects/mysql/aiomysql.py,sha256=hZg2W3C_O2ExL5bVlcqmDX0vZbVR2dMyuGqTGVlBzig,10332 -sqlalchemy/dialects/mysql/asyncmy.py,sha256=xnYyNzk6JJVzxpxu4B7GhUP9CR5rtHVw8mGoWsHAwT8,10404 -sqlalchemy/dialects/mysql/base.py,sha256=uvxlbbs_gHrUeWoBUxwtVcFn8Q_FAeXZM2Xgy-NG8iY,124435 -sqlalchemy/dialects/mysql/cymysql.py,sha256=0mRP3gFe2t7iJYQqJz1Os_TztFwMAF34w2MmXe-4B_w,2384 -sqlalchemy/dialects/mysql/dml.py,sha256=n31-m4vfOIL0MdHpUdIfTLgaMzusfQ-yHYoJWO_ndEc,7864 -sqlalchemy/dialects/mysql/enumerated.py,sha256=Nz9Sv3ENX-1T18aEoOY8QfZlAcwRf65lIOse7vwjil8,8692 -sqlalchemy/dialects/mysql/expression.py,sha256=uxD1fICubfGh8BhAn6WoeS8AF6hAVEvreDShXqRZTqM,4238 -sqlalchemy/dialects/mysql/json.py,sha256=i0Lrd_7VKTd3fNm6kQKzrtPERuW0JeSw7XSUWnl1HQI,2350 -sqlalchemy/dialects/mysql/mariadb.py,sha256=WoNxkjiPfIbWAkrVEU9MTM7mePeLHZ2uiJsyfvcpv1s,885 -sqlalchemy/dialects/mysql/mariadbconnector.py,sha256=wRuM0PPLnK1avjQnvbgC7pLf54JTondEpi-sffDWPMM,8900 -sqlalchemy/dialects/mysql/mysqlconnector.py,sha256=w4a36lSTM8JHgohUoOsjqqaWh3822207gGUFGovkSRo,5909 
-sqlalchemy/dialects/mysql/mysqldb.py,sha256=2aFRw8C0LdU7hk6zjXDijEY8UeWK360TB8pTMZ5Ddto,9806 -sqlalchemy/dialects/mysql/provision.py,sha256=ikb3a6XUnMa51JLmlIbDr9QLvTs3QtdyldHn-lME-qI,3685 -sqlalchemy/dialects/mysql/pymysql.py,sha256=Kxi_A34-nbQ5UEFSmy14TXc1v43-1SZ8gE628REGTFo,4220 -sqlalchemy/dialects/mysql/pyodbc.py,sha256=CZCEnhyLIgbuiAW32Cw7N1m1aiQv1eBB34pV-txOs70,4435 -sqlalchemy/dialects/mysql/reflection.py,sha256=HQOQbNu9o977ki2Xpb7AbZ_mhAR9EK3YOGp3HoezJxg,23511 -sqlalchemy/dialects/mysql/reserved_words.py,sha256=qzej7CIVFz2Q2ywue7nKL59cca2kzXhKpDOSQYMlxjU,9829 -sqlalchemy/dialects/mysql/types.py,sha256=wqfI5QZ8__Uzn9cYefTMZ387cJJJgbNkCDE9Ax2k1pY,25117 -sqlalchemy/dialects/oracle/__init__.py,sha256=_yFT_k0R6yc7MKQG-Al9QZt8wYZsiCtpkhNlba5xqn8,1560 -sqlalchemy/dialects/oracle/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/dialects/oracle/__pycache__/base.cpython-312.pyc,, -sqlalchemy/dialects/oracle/__pycache__/cx_oracle.cpython-312.pyc,, -sqlalchemy/dialects/oracle/__pycache__/dictionary.cpython-312.pyc,, -sqlalchemy/dialects/oracle/__pycache__/oracledb.cpython-312.pyc,, -sqlalchemy/dialects/oracle/__pycache__/provision.cpython-312.pyc,, -sqlalchemy/dialects/oracle/__pycache__/types.cpython-312.pyc,, -sqlalchemy/dialects/oracle/base.py,sha256=h_ofqAxmm0N6YyZkwaeQyWZgIhRlIW2oTDWUWlMvR2c,122971 -sqlalchemy/dialects/oracle/cx_oracle.py,sha256=5DyLekZ1IV1-KQdtMHEyt-AtHDq1RLX-XN-N6zPEywA,56718 -sqlalchemy/dialects/oracle/dictionary.py,sha256=tmAZLEACqBAPBE0SEV2jr1R4aPcpNOrbomJl-UmgiR4,20026 -sqlalchemy/dialects/oracle/oracledb.py,sha256=nWB80zaRoUl9uJ3rjvCnvifeqfNiCzFmDi6uSr6wEkk,14050 -sqlalchemy/dialects/oracle/provision.py,sha256=KKlXDQnC8n6BjLJWA7AJg3lwXluH1OyStqfP2Uf9rq0,8524 -sqlalchemy/dialects/oracle/types.py,sha256=U9EReFRcr0PiwOxT9vg2cA7WOix8LQ2sVp0gRkMHcPo,8518 -sqlalchemy/dialects/postgresql/__init__.py,sha256=C0BhKzUkClwGfAetbBIEd4KQohOtSmfHo5TwhoZLCK0,4059 -sqlalchemy/dialects/postgresql/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/_psycopg_common.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/array.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/asyncpg.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/base.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/dml.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/ext.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/hstore.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/json.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/named_types.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/operators.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/pg8000.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/pg_catalog.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/provision.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/psycopg.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/psycopg2.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/psycopg2cffi.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/ranges.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/__pycache__/types.cpython-312.pyc,, -sqlalchemy/dialects/postgresql/_psycopg_common.py,sha256=fYFqLVxNxAqh3nOvzGOv3Pfpm2BsclHrk71MJZrpJKo,5883 -sqlalchemy/dialects/postgresql/array.py,sha256=_vzfyGBY1NsT6blooCgHrLC38VZbM4UWKQHgXLmmyYs,14159 -sqlalchemy/dialects/postgresql/asyncpg.py,sha256=p5mSOtBNSudNgBXaKQChh1S0XJ8XeVeOc1u25iWvm94,42410 
-sqlalchemy/dialects/postgresql/base.py,sha256=wS7qpm6T_MpcV5wlcPPAxFun75wOshKq9eXqLRqeAdQ,183996 -sqlalchemy/dialects/postgresql/dml.py,sha256=uMiqxEkji-UXqk8gO1ramQEvEfCugYmy8Cv1cnG7DQs,11522 -sqlalchemy/dialects/postgresql/ext.py,sha256=ct6NQfMAfBnLYhybpF2wPEq-p8-U0tEpy-aq8NwqJLw,16758 -sqlalchemy/dialects/postgresql/hstore.py,sha256=4jAZQMPWl3VE4weDRZrgrbVDRZJTM3X0Xj4twr5znYQ,11938 -sqlalchemy/dialects/postgresql/json.py,sha256=Pwi13ENmp8xjdJuQoRkL6wjSNXzbQ7rzTsvHP88DGcs,11922 -sqlalchemy/dialects/postgresql/named_types.py,sha256=Ykl4GWSf5pQynkAWfZsAjgYU0R_TSvEvaZrb8mI4PuQ,18103 -sqlalchemy/dialects/postgresql/operators.py,sha256=iyZuyx_daRyJjiS5rw-XnZlaWj1bmRiHdy5MXzBrFZw,2937 -sqlalchemy/dialects/postgresql/pg8000.py,sha256=TPJXX078vW0FSwZ-DlWNkEOXg7Z4xk8IFwi1droMhPw,19302 -sqlalchemy/dialects/postgresql/pg_catalog.py,sha256=rG_AGLtjSQ6DAnkqAiurYpnIuLhN9Ib_QydWbmjK--s,9554 -sqlalchemy/dialects/postgresql/provision.py,sha256=8ieCPOOsTLLcf2qa4zasAu7UCJ9S7rK5xl2_u_OkB2w,5945 -sqlalchemy/dialects/postgresql/psycopg.py,sha256=k6gog6T8P6K2Spmj8N9GdQil2Zpb2ZnW0ZWjfe4lxbU,24002 -sqlalchemy/dialects/postgresql/psycopg2.py,sha256=Jczz-5NGNYlkmkDlJYCpXRQSmNszqgZZaX6yqwc0vvg,32884 -sqlalchemy/dialects/postgresql/psycopg2cffi.py,sha256=hFg-9GH08ApPy3foVPUdJKwCEzNSv2zD5l4nH97AqgI,1817 -sqlalchemy/dialects/postgresql/ranges.py,sha256=oiTmnZ-hd5WqqGNsXbuOJfoNxpbso_M_49gky8dlCrE,33978 -sqlalchemy/dialects/postgresql/types.py,sha256=pd1QmuGwJFLqpY2tK-Ql3FNjtT1Ha-lVvfaR9dimvHc,7603 -sqlalchemy/dialects/sqlite/__init__.py,sha256=MmQfjHun1U_4q-Dq_yhs9RzAX0VLixSwWeY5xWiDwag,1239 -sqlalchemy/dialects/sqlite/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/aiosqlite.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/base.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/dml.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/json.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/provision.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/pysqlcipher.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/__pycache__/pysqlite.cpython-312.pyc,, -sqlalchemy/dialects/sqlite/aiosqlite.py,sha256=q9NcdCMnlOMiX2ySw-lqzZB0X8_nYoCmHES9SiIy9v0,12741 -sqlalchemy/dialects/sqlite/base.py,sha256=BdnrWOL6AmVDLpChIW32Snsq_gipbMXuov3WDNwpjDk,100643 -sqlalchemy/dialects/sqlite/dml.py,sha256=8JV6Ise7WtmFniy590X5b19AYZcE51M6N5hef7d9JoA,8683 -sqlalchemy/dialects/sqlite/json.py,sha256=-9afZnBt07vInCX20CKzjlTG85wHTO5_cxhcYU4phDc,2869 -sqlalchemy/dialects/sqlite/provision.py,sha256=nAXZPEjXFrb6a1LxXZMqKmkQoXgl3MPsSHuMyBQ76NU,5830 -sqlalchemy/dialects/sqlite/pysqlcipher.py,sha256=p0KfzHBwANDMwKTKEJCjR5RxMYqQwS4E8KXjl3Bx6Fw,5511 -sqlalchemy/dialects/sqlite/pysqlite.py,sha256=TcApeaXMjcM-IMRNEp4sQn-lE3Z6khLvoe7L63k7TN4,28824 -sqlalchemy/dialects/type_migration_guidelines.txt,sha256=gyh3JCauAIFi_9XEfqm3vYv_jb2Eqcz2HjpmC9ZEPMM,8384 -sqlalchemy/engine/__init__.py,sha256=93FWhb62dLCidc6e4FE65wq_P8GeoWQG1OG6RZMBqhM,2880 -sqlalchemy/engine/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/_py_processors.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/_py_row.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/_py_util.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/base.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/characteristics.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/create.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/cursor.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/default.cpython-312.pyc,, 
-sqlalchemy/engine/__pycache__/events.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/interfaces.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/mock.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/processors.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/reflection.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/result.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/row.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/strategies.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/url.cpython-312.pyc,, -sqlalchemy/engine/__pycache__/util.cpython-312.pyc,, -sqlalchemy/engine/_py_processors.py,sha256=-jlAYPM6etmuKeViiI7BD41kqY0Pr8nzaox22TPqCCQ,3880 -sqlalchemy/engine/_py_row.py,sha256=UEGCjAeRsggcUn0QB0PdFC82kuykrOiOZ1KGq_Gf_qQ,3915 -sqlalchemy/engine/_py_util.py,sha256=nh1XoVq1b-eGgkdzbqFqzje0RNSmVWotoa6yaB7J5Sw,2558 -sqlalchemy/engine/base.py,sha256=ags97hnejh9awIFQ5t1RkHnZOSDJSUiSeaOqrPAU_4k,126333 -sqlalchemy/engine/characteristics.py,sha256=hfTuHv_WxSNHOS_LFIypSb27lqoZKRXIQKyT7ay5lTY,4920 -sqlalchemy/engine/create.py,sha256=-SCpvMx3DIwt8TD6Cyh_ChieKQ8y3hDD9YLDBIKgY6o,34081 -sqlalchemy/engine/cursor.py,sha256=zKN-AiwE-0KkvdWqhg8dOAT4R1U1a4I0tFgAqzihSnE,78573 -sqlalchemy/engine/default.py,sha256=vut1eoGPPFksx3iMTr1MNzLvM52GNgqzm9sZuvTVt24,87013 -sqlalchemy/engine/events.py,sha256=e0VHj69fH20sB7gocBhr5Rs2FjR8ioY4iE8VQt70oJg,38332 -sqlalchemy/engine/interfaces.py,sha256=a7kJmfaTRQgBvoUve_86CFI2tWGF-Htv_OQBJHCCQtU,116339 -sqlalchemy/engine/mock.py,sha256=wInBRiHwydTc5ELQLivdezDd1ikbSMVXgLVzZrSC0iQ,4310 -sqlalchemy/engine/processors.py,sha256=w4MiVMlU6VvfhIW49nygbHcwX8FteGpz7g3IGEqtZb8,2440 -sqlalchemy/engine/reflection.py,sha256=DYyhp221HdpnjtEKvIXoHFj6LvkUntMpK3gc7FwkhZ4,77462 -sqlalchemy/engine/result.py,sha256=U245Q3kGUOugqjmv-qSkx8eyDn27fLYV5agIoBHXQCA,79985 -sqlalchemy/engine/row.py,sha256=g7ZqmsqX_BtRUzY-zfXoZZ4-5xZ_KJEVbvqKHUIlqRg,12433 -sqlalchemy/engine/strategies.py,sha256=fD4DJn0AD371wlUa7s5Sy4j7QtgGyP7gMy_kUyqCLDQ,461 -sqlalchemy/engine/url.py,sha256=tOCRmKkqrpsIfNeSDoy6KKTLtQAMtoIn9xa5kmJQebk,31694 -sqlalchemy/engine/util.py,sha256=wIrPulEwr7kAxJ-vNj5QSPGNfWoFx5B4zzJdXu_ZIFg,5849 -sqlalchemy/event/__init__.py,sha256=09qZzHwt0PkIDsPwuPUVJvNakjtCBjuUJeY0AEJ9j7k,1022 -sqlalchemy/event/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/event/__pycache__/api.cpython-312.pyc,, -sqlalchemy/event/__pycache__/attr.cpython-312.pyc,, -sqlalchemy/event/__pycache__/base.cpython-312.pyc,, -sqlalchemy/event/__pycache__/legacy.cpython-312.pyc,, -sqlalchemy/event/__pycache__/registry.cpython-312.pyc,, -sqlalchemy/event/api.py,sha256=I7XWFczjgl3RcBH_52TbQ0S3_W4RZz1Be9dXkxtFw5U,8451 -sqlalchemy/event/attr.py,sha256=-SHjzXMOs7IICPSgNwpgRS3FIEeLIpB5PyvVlpw8Gp8,21406 -sqlalchemy/event/base.py,sha256=5JA45j3ncMWPXDeIPLJ_D5lX06Z3TJf8dla8bCriOkU,15597 -sqlalchemy/event/legacy.py,sha256=a8VEvS83PvgbomNnaSa3okZmTkxl_buZ7Lfilechjh8,8473 -sqlalchemy/event/registry.py,sha256=f31k0FLqIlWpOK9tksiYXnv-yuZPPz9iLQqvKEYV7ko,11221 -sqlalchemy/events.py,sha256=OAy8TK21lWzSe8bDUnAbmsP82bsBYy0LL19hR6y3BrM,542 -sqlalchemy/exc.py,sha256=k01TD2xp2BM3DrXdo2U5r8yuRfsoqBND4kwvtD1SVN0,24806 -sqlalchemy/ext/__init__.py,sha256=YbMQmRS_9HxRyWM-KA_F76WOss1_Em1ZcrnQDIDXoOc,333 -sqlalchemy/ext/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/associationproxy.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/automap.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/baked.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/compiler.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/horizontal_shard.cpython-312.pyc,, 
-sqlalchemy/ext/__pycache__/hybrid.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/indexable.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/instrumentation.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/mutable.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/orderinglist.cpython-312.pyc,, -sqlalchemy/ext/__pycache__/serializer.cpython-312.pyc,, -sqlalchemy/ext/associationproxy.py,sha256=MBtGwISA4wwT9i6op8jfY6u9lCUYn_4JCqtxZpjggok,67776 -sqlalchemy/ext/asyncio/__init__.py,sha256=tKYIrERYf8hov9m8DuKWRO_53qhrvj2jRmIYjSGQ2Po,1342 -sqlalchemy/ext/asyncio/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/ext/asyncio/__pycache__/base.cpython-312.pyc,, -sqlalchemy/ext/asyncio/__pycache__/engine.cpython-312.pyc,, -sqlalchemy/ext/asyncio/__pycache__/exc.cpython-312.pyc,, -sqlalchemy/ext/asyncio/__pycache__/result.cpython-312.pyc,, -sqlalchemy/ext/asyncio/__pycache__/scoping.cpython-312.pyc,, -sqlalchemy/ext/asyncio/__pycache__/session.cpython-312.pyc,, -sqlalchemy/ext/asyncio/base.py,sha256=slWQTFdgQQlkzrnx3m5a9xT8IRg4iM0gkEbypXr_YXQ,9184 -sqlalchemy/ext/asyncio/engine.py,sha256=HJ5IZD0_xfVOMEGYZ1XtDir73SpzBk6ODDUN75ltvzo,49656 -sqlalchemy/ext/asyncio/exc.py,sha256=0awLfUB4PhEPVVTKYluyor1tW91GPZZnvdQ-GGSOmJY,660 -sqlalchemy/ext/asyncio/result.py,sha256=MtKAqA7hwYIdkpRxlCgHNYYzlB7dvqCtEp-aoDdFjDA,31370 -sqlalchemy/ext/asyncio/scoping.py,sha256=XZqvqtIXpWujkjV7DBikIsQcjaVLei1C3LlflZxiZ_o,54222 -sqlalchemy/ext/asyncio/session.py,sha256=iI6V7FA8TMgb5qSQBsy7DiacHhDxss4Hu2WU_NC3tCo,65039 -sqlalchemy/ext/automap.py,sha256=dIo-IoF9t6xcrBj1EV5caHPmPISXXzH-637utGlqi00,63272 -sqlalchemy/ext/baked.py,sha256=jc6vPocoXXsvdZsOsqgT4kG6guWSZD1TdPjoRBmkbRU,18381 -sqlalchemy/ext/compiler.py,sha256=PbvelWqZdzL6y1C6rEc8ledF79t_04MtYV26RUwNhik,20946 -sqlalchemy/ext/declarative/__init__.py,sha256=MHSOffOS4MWcqshAuLNQv0vDXpK_Z3lpGXTm1riyLls,1883 -sqlalchemy/ext/declarative/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/ext/declarative/__pycache__/extensions.cpython-312.pyc,, -sqlalchemy/ext/declarative/extensions.py,sha256=aPpW0PvTKH3CoSMhsOY5GcUMZOVq-OFsV1hflxmb3Lw,20095 -sqlalchemy/ext/horizontal_shard.py,sha256=V8vXEt5ZQb_PM39agZD2IyoQNGSqVI1MhY-6mNV5MRY,17231 -sqlalchemy/ext/hybrid.py,sha256=UpWd9hOD5I3hCT5FJW9twSDyShDVyygRIFtv-FmMHgM,53972 -sqlalchemy/ext/indexable.py,sha256=aDlVpN4rilRrer9qKg3kO7fqnqB5NX4M5qzYuYM8pvw,11373 -sqlalchemy/ext/instrumentation.py,sha256=lFsJECWlN1oc1E0r9TaQDZcxAx4VOz6PSHYrl5fLk9Y,16157 -sqlalchemy/ext/mutable.py,sha256=nAz3_lF2xkYSARt7GAWQh-OUMcnpe6s1ocjvQGxCPkc,38428 -sqlalchemy/ext/mypy/__init__.py,sha256=aqT8_9sNwzC8PIaEZ4zkCYGBvYPaDD3eCgJtJuk3g6A,247 -sqlalchemy/ext/mypy/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/ext/mypy/__pycache__/apply.cpython-312.pyc,, -sqlalchemy/ext/mypy/__pycache__/decl_class.cpython-312.pyc,, -sqlalchemy/ext/mypy/__pycache__/infer.cpython-312.pyc,, -sqlalchemy/ext/mypy/__pycache__/names.cpython-312.pyc,, -sqlalchemy/ext/mypy/__pycache__/plugin.cpython-312.pyc,, -sqlalchemy/ext/mypy/__pycache__/util.cpython-312.pyc,, -sqlalchemy/ext/mypy/apply.py,sha256=1Qb-_FpQ_0LVB2KFA5hVjfPv6DDMIcxXe86Ts1X9GBk,10870 -sqlalchemy/ext/mypy/decl_class.py,sha256=f2iWiFVlDFqGb_IoGGotI3IEOUErh25sLT7B_cMfx0g,17899 -sqlalchemy/ext/mypy/infer.py,sha256=O-3IjELDSBEAwGGxRM7lr0NWwGD0HMK4vda_iY6iwjs,19959 -sqlalchemy/ext/mypy/names.py,sha256=2bHYuQJe71c9JtuJSZP-WWGiOJmY9-FuMQGMDBB6dRs,10814 -sqlalchemy/ext/mypy/plugin.py,sha256=TDTziLsYFRqyX8UcQMtBBa6TFR4z9N-XNO8wRkHlEOI,10053 -sqlalchemy/ext/mypy/util.py,sha256=s3-QXtuJ5lBZHdhpMdCa36EBV-wic5Dl1hFOB_2bVqw,10317 
-sqlalchemy/ext/orderinglist.py,sha256=r7La_3nZlGevIgsBL1IB30FvWO_tZHlTKo_FWwid-aY,14800 -sqlalchemy/ext/serializer.py,sha256=4DuLXwWsuZU8vyFw2kOmuelSXozpCgr_pH36af1mtVE,6321 -sqlalchemy/future/__init__.py,sha256=6-qPdjMHX-V-kAPjTQgNuHztmYiwKlJhKhhljuETvoQ,528 -sqlalchemy/future/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/future/__pycache__/engine.cpython-312.pyc,, -sqlalchemy/future/engine.py,sha256=N_5W2ab5-ueedWzqNdgLPzTW9audT1IbxF6FCDLRZOc,510 -sqlalchemy/inspection.py,sha256=GpmMuSAZ53u4W__iGpvzQKCBMFnTxnHt4Lo7Nq1FSKM,5237 -sqlalchemy/log.py,sha256=Sg6PGR_wmseiCCpJfRDEkaMs08XTPPsf0X_iYJLvzS0,8895 -sqlalchemy/orm/__init__.py,sha256=I-XesvuyjkAAwnsiF5FnXRLNV6W2nW70EnGAIt2GAjU,8633 -sqlalchemy/orm/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/_orm_constructors.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/_typing.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/attributes.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/base.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/bulk_persistence.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/clsregistry.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/collections.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/context.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/decl_api.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/decl_base.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/dependency.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/descriptor_props.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/dynamic.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/evaluator.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/events.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/exc.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/identity.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/instrumentation.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/interfaces.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/loading.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/mapped_collection.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/mapper.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/path_registry.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/persistence.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/properties.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/query.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/relationships.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/scoping.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/session.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/state.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/state_changes.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/strategies.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/strategy_options.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/sync.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/unitofwork.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/util.cpython-312.pyc,, -sqlalchemy/orm/__pycache__/writeonly.cpython-312.pyc,, -sqlalchemy/orm/_orm_constructors.py,sha256=163-sff88pVz5_wMCPplQD_w3xuzK4D1f4XseHTzlcI,103229 -sqlalchemy/orm/_typing.py,sha256=Z9GZT8Vb-wFwvHeOeVE37dvmCWdItLZnqI_pLin4cMc,5152 -sqlalchemy/orm/attributes.py,sha256=4PcKfGsqziKh-odMCSoLP37lCzVT27n8qrXuDEDZq74,95369 -sqlalchemy/orm/base.py,sha256=1yljObgot8oYfYoq3X13jfMMxSfdrMcLMPCcBPrrf9s,28437 -sqlalchemy/orm/bulk_persistence.py,sha256=sSpBwzYznCbh31LaYqfpAaMGkJ2V-5-gZghpVH1K2Bo,73514 -sqlalchemy/orm/clsregistry.py,sha256=2pE9BCnPEDvid_2zgFHllr2CTG_jLcx_ThdZo6zJ0ew,18545 -sqlalchemy/orm/collections.py,sha256=UOBbkTc4jCMbqM7fb9277VR1fCN5TRaFKV8pAcNyv78,53863 -sqlalchemy/orm/context.py,sha256=3egDf1kEB41tiNByqVj5_cUG4AcYDoYTZhxICBSAugA,116223 
-sqlalchemy/orm/decl_api.py,sha256=i6mXKqNrCxu3bJKhhBt_KtsPZHVVDHemAFEQpUdQvAM,65881 -sqlalchemy/orm/decl_base.py,sha256=hbWWNbz1tiFD46qbp5hR-aGZMnG9JzulkY91e3vkRcs,85012 -sqlalchemy/orm/dependency.py,sha256=glstmbB4t-PIRA47u9NgTyyxbENfyQuG9Uzj2iezB_s,48935 -sqlalchemy/orm/descriptor_props.py,sha256=uRIgZMIRFACONzgiXz6q6wdvahby2s1ogkjHARhEVFk,38320 -sqlalchemy/orm/dynamic.py,sha256=MMPj4Esn_CRweI8YMbYcl9nNwcMWB4O6owOaFgcIvB0,10116 -sqlalchemy/orm/evaluator.py,sha256=Uttss9NiHt12sWWjH4XfK8hzQANciaefXwFf7On2cns,12732 -sqlalchemy/orm/events.py,sha256=8K_DUstcftizTDyYomGXo_GLbYdUG526JYQa9b5D7DA,131038 -sqlalchemy/orm/exc.py,sha256=fd24WdW3CP3oxFcz9CLXPeBIAfqJZbKv7K4G-5X4EOg,7641 -sqlalchemy/orm/identity.py,sha256=fOpANTf73r12F_w9DhVoyjkAdh8ldgJcNnwxx0GY8YM,9551 -sqlalchemy/orm/instrumentation.py,sha256=a8vi3qEAyO7Z9PYksLkFi_YzxqQhzB-anblegiAtsFw,25075 -sqlalchemy/orm/interfaces.py,sha256=tTV0drZDoYX2MhHUYQEGoALuTWxgfNdnKeYM3I38BO8,49971 -sqlalchemy/orm/loading.py,sha256=RCEUmTS--9QntLQcnwYVxHRsC3ILE58AVNHb8MGcybg,59959 -sqlalchemy/orm/mapped_collection.py,sha256=OjiojLC9caz_-0vt2289sS35bGOiM6GbxWywCg1RFZc,20239 -sqlalchemy/orm/mapper.py,sha256=WSDQZ_rwMd6mEwJDaS9oztcCBuNjw7fb5NlJiaLQtl0,175509 -sqlalchemy/orm/path_registry.py,sha256=GQS4KatFTi_6LKdi6I4185igHE6DDwrm8b1AMOHMom4,26731 -sqlalchemy/orm/persistence.py,sha256=MKb7TuSLJUQpyqnHxf6uNmGXSznmZgkkFTD04nHbNUQ,63483 -sqlalchemy/orm/properties.py,sha256=DicF2OHW-b5z7xJrahELKd5OPm9cVfTIYjQcoEM1naw,30192 -sqlalchemy/orm/query.py,sha256=KgFWo2xCpF26VHRD3oRLNvbckekuORcn1LPTjguDCZ0,121104 -sqlalchemy/orm/relationships.py,sha256=tj00j1ISHQmJbWeegWn8U29lL6WUnqd8fdY7NRHcLWU,132144 -sqlalchemy/orm/scoping.py,sha256=Na86AVpI1bry0ICm67wXT7CmCffVu3Muoorn9c-hLNk,80853 -sqlalchemy/orm/session.py,sha256=EDUpkGw5rx66579JfvtME2AeqGsdh1V2nznuc3dgDfc,201280 -sqlalchemy/orm/state.py,sha256=TB9954ZttIUj4uMF-4tfocwaDaq9g1uCbgXuuk-VwJA,38813 -sqlalchemy/orm/state_changes.py,sha256=4i90vDgBGvVGUzhlonlBkZBAZFOWaAXij2X8OEA3-BA,7013 -sqlalchemy/orm/strategies.py,sha256=BIVG-k9jQqKCJUuEptYvRrfYaHGB7tN9wcrUzTOzks0,119626 -sqlalchemy/orm/strategy_options.py,sha256=uajUK7bpjJGl8ygbbuzChwXiJEUd6dEHtjXEnqY1dlw,87922 -sqlalchemy/orm/sync.py,sha256=aMEMhYTj2rtJZJvjqm-cUx2CoQxYl8P6YddCLpLelhM,5943 -sqlalchemy/orm/unitofwork.py,sha256=THggzzAaqmYh5PBDob5dHTP_YyHXYdscs3fIxtRV-gE,27829 -sqlalchemy/orm/util.py,sha256=U9guwXlyWrAFUmR1lxAKh8SdeltGZgPV5WadhCb_QD4,83352 -sqlalchemy/orm/writeonly.py,sha256=j5DcpZKOv1tLGQLhKfk-Uw-B0yEG7LezwJWNTq0FtWQ,22983 -sqlalchemy/pool/__init__.py,sha256=ZKUPMKdBU57mhu677UsvRs5Aq9s9BwIbMmSNRoTRPoY,1848 -sqlalchemy/pool/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/pool/__pycache__/base.cpython-312.pyc,, -sqlalchemy/pool/__pycache__/events.cpython-312.pyc,, -sqlalchemy/pool/__pycache__/impl.cpython-312.pyc,, -sqlalchemy/pool/base.py,sha256=D0sKTRla6wpIFbELyGY2JEHUHR324rveIl93qjjmYr8,53751 -sqlalchemy/pool/events.py,sha256=ysyFh0mNDpL4N4rQ-o_BC6tpo_zt0_au_QLBgJqaKY8,13517 -sqlalchemy/pool/impl.py,sha256=BU5vUQ6NDFIldsG9og6mtO14SsqwpUqvwyGqZsKT6i0,19525 -sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -sqlalchemy/schema.py,sha256=UFhZjGmYoqN3zkId7M4CbVCd8KaeZUfKUjdlk0sHQ_E,3264 -sqlalchemy/sql/__init__.py,sha256=T16ZB3Za0Tq1LQGXeJuuxDkyu2t-XHR2t-8QH1mE1Uw,5965 -sqlalchemy/sql/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/_dml_constructors.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/_elements_constructors.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/_orm_types.cpython-312.pyc,, 
-sqlalchemy/sql/__pycache__/_py_util.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/_selectable_constructors.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/_typing.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/annotation.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/base.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/cache_key.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/coercions.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/compiler.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/crud.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/ddl.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/default_comparator.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/dml.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/elements.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/events.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/expression.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/functions.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/lambdas.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/naming.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/operators.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/roles.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/schema.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/selectable.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/sqltypes.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/traversals.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/type_api.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/util.cpython-312.pyc,, -sqlalchemy/sql/__pycache__/visitors.cpython-312.pyc,, -sqlalchemy/sql/_dml_constructors.py,sha256=1xMH5Kd6SLhlFwfIs_lOXGC8GTrqW8mQM7Kc3cKyLuw,4007 -sqlalchemy/sql/_elements_constructors.py,sha256=gYSQwuVF0rTd40zGCrPzHdBAZj5kaCtf27S6sCVhbSU,64895 -sqlalchemy/sql/_orm_types.py,sha256=_bzlAh3-vTIZoLvAM2ry1SF7rsYRM3-jupfhGWZZn5Y,645 -sqlalchemy/sql/_py_util.py,sha256=VzThcXk7fKqT9_mZmXrkxePdwyyl_wIciCftzl2Z_-g,2248 -sqlalchemy/sql/_selectable_constructors.py,sha256=mRgtlGyctlb1LMBqFxgn0eGzIXMbyZtQafjUuJWhYjs,19415 -sqlalchemy/sql/_typing.py,sha256=FzAwrbL9o2CMluxxj1MfYh9ut7NDqkzD6dkWk5tz_dI,13231 -sqlalchemy/sql/annotation.py,sha256=PslN1KQV9hN8Ji4k8I3-W-cDuRMCCLwMmJcg-n86Yy4,18830 -sqlalchemy/sql/base.py,sha256=LY4zzFtADcqGcvKPUuuC9rK-vucqmHn3zKklVivoZ-M,76110 -sqlalchemy/sql/cache_key.py,sha256=nEvUQ4yjtWWblrKjLLDd_b9i5zudgYhkOdJvI1U8Lvo,34725 -sqlalchemy/sql/coercions.py,sha256=AviiI2LmotbNgV4JLBpUDJABLiEz4pc5ob4VnCi2i4I,42033 -sqlalchemy/sql/compiler.py,sha256=GXd4pruoy7-fMTc8-4GSQyHCTqBMzpykn3WVvqxhkYM,282505 -sqlalchemy/sql/crud.py,sha256=YL1HIVrUkcduncUZ6F4gEBf5PlowguGkxMAlgMVlKgI,58183 -sqlalchemy/sql/ddl.py,sha256=NbW8F3UT4BTnda5w5TNPGxXPtv0wHSNB51hhr4gBSJM,46980 -sqlalchemy/sql/default_comparator.py,sha256=lXmd8yAUzfyeP5w4vebrQG99oC0bTrmdGc0crBq1GKw,17259 -sqlalchemy/sql/dml.py,sha256=lt5FC6BbJNotE65U-fmvEovBxkADfKBnVcnkVYYQxUM,67431 -sqlalchemy/sql/elements.py,sha256=FudYJXww47JMyXnyZnX2foxsXITLLhFTQkFIWvPXf0s,182043 -sqlalchemy/sql/events.py,sha256=pG3jqJbPX18N9Amp47aXiQYMYD_HL_lOXHk-0m8m7Hw,18745 -sqlalchemy/sql/expression.py,sha256=T-AgCPp30tgKQYLKeSyqQg_VoJFE69m2yDTz6fn-u1E,7748 -sqlalchemy/sql/functions.py,sha256=9iazLxbvCMH35CzgtZaNJ0IUn8wz04w9DofY3jNfIHc,65817 -sqlalchemy/sql/lambdas.py,sha256=eKlhUhD8urNVvOm_1tUf8ESPIpo2qTAidKHJEarUhj8,50741 -sqlalchemy/sql/naming.py,sha256=ERVjqo6fBHBw2BwNgpbb5cvsCkq1jjdztczP9BKzVt8,7070 -sqlalchemy/sql/operators.py,sha256=CgijIxSVCh00XnqfhtK0sHD-gR5HlQt8LdJUZlPBu2E,78685 -sqlalchemy/sql/roles.py,sha256=8nO4y1hbP1cA8IzeOn6uPgNZNVILb3E-IMeJWOIScu8,7985 
-sqlalchemy/sql/schema.py,sha256=bCxaa_Y_wZrkDauJX4Cr4Lr1vZaqNCoK3Dzd0ki_fsQ,235875 -sqlalchemy/sql/selectable.py,sha256=5jDioKY0V9fT731TtWHtUTKuzO6beUABx9l-fjh-ywo,243464 -sqlalchemy/sql/sqltypes.py,sha256=EnA7UEZmvM3ov4sYljXp7CLQco_Z4ZyMV6E25C6NH28,131151 -sqlalchemy/sql/traversals.py,sha256=dPHz1Kyyo_a5JcJiajR39iQejisGqefqUjYXqlBT7Vo,34688 -sqlalchemy/sql/type_api.py,sha256=_6MqWtAmgSYSDWEckR3R1CJPVB_7No0QUJmD68Vo1fQ,86035 -sqlalchemy/sql/util.py,sha256=ftTiyNGeJK0MIRMqWMV7Xf8iZuiRGocoJRp3MIO3F3Y,49563 -sqlalchemy/sql/visitors.py,sha256=oudlabsf9qleuC78GFe_iflRSAD8H-HjaM7T8Frc538,37482 -sqlalchemy/testing/__init__.py,sha256=X7Td6ZZXYkZ_I58YPcHZnZ11RwwAmbnsC1ds3F6lOj8,3256 -sqlalchemy/testing/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/assertions.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/assertsql.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/asyncio.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/config.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/engines.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/entities.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/exclusions.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/pickleable.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/profiling.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/provision.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/requirements.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/schema.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/util.cpython-312.pyc,, -sqlalchemy/testing/__pycache__/warnings.cpython-312.pyc,, -sqlalchemy/testing/assertions.py,sha256=bBn2Ep89FF-WBmzh0VkvnJ9gNMKuqk8OXq7ALpUwar4,32428 -sqlalchemy/testing/assertsql.py,sha256=gj4YRBR9cjOtS1WgR3nsyIze1tmqctsNs1uCV8N2Q4w,17333 -sqlalchemy/testing/asyncio.py,sha256=xYuWjKFHzolBLgddy1ePI9l8KRRUOWpT-FWjhtV2Ei0,3965 -sqlalchemy/testing/config.py,sha256=jfFVUiAOm8im6SlqyAdZVSaA51kmADgfBDqrHnngH7c,12517 -sqlalchemy/testing/engines.py,sha256=8R7nbmLNUv2w7tiyVpiVI1s-57wpEs70UAgY-pkPX8k,13953 -sqlalchemy/testing/entities.py,sha256=Um-DFSz81p06DhTK899ZRUOZRw3FtUDeNMVHcIg3eLc,3471 -sqlalchemy/testing/exclusions.py,sha256=8kjsaFfjCvPlLsQLD_LIDwuqvVlIVbD5qTWBlKdtNkM,12895 -sqlalchemy/testing/fixtures/__init__.py,sha256=B1IFCzEVdCqhEvFrLmgxZ_Fr08jDus5FddSA-lnnAAU,1226 -sqlalchemy/testing/fixtures/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/testing/fixtures/__pycache__/base.cpython-312.pyc,, -sqlalchemy/testing/fixtures/__pycache__/mypy.cpython-312.pyc,, -sqlalchemy/testing/fixtures/__pycache__/orm.cpython-312.pyc,, -sqlalchemy/testing/fixtures/__pycache__/sql.cpython-312.pyc,, -sqlalchemy/testing/fixtures/base.py,sha256=S0ODuph0jA2Za4GN3NNhYVIqN9jAa3Q9Vd1N4O4rcTc,12622 -sqlalchemy/testing/fixtures/mypy.py,sha256=2H8QxvGvwsb_Z3alRtvCvfXeqGjOb8aemfoYxQiuGMc,12285 -sqlalchemy/testing/fixtures/orm.py,sha256=6JvQpIfmgmSTH3Hie4nhmUFfvH0pseujIFA9Lup2Dzw,6322 -sqlalchemy/testing/fixtures/sql.py,sha256=w0qyMBLibPqkhE5ZhJtHcJmIDJlwcPpnlq9v0WajrF8,16403 -sqlalchemy/testing/pickleable.py,sha256=uYLl557iNep6jSOVl0vK1GwaLHUKidALoPJc-QIrC08,2988 -sqlalchemy/testing/plugin/__init__.py,sha256=bbtVIt7LzVnUCcVxHWRH2owOQD067bQwwhyMf_whqHs,253 -sqlalchemy/testing/plugin/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,, -sqlalchemy/testing/plugin/__pycache__/plugin_base.cpython-312.pyc,, -sqlalchemy/testing/plugin/__pycache__/pytestplugin.cpython-312.pyc,, -sqlalchemy/testing/plugin/bootstrap.py,sha256=USn6pE-JcE5pSmnEd2wad3goKLx2hdJS3AUUFpXHm-I,1736 
-sqlalchemy/testing/plugin/plugin_base.py,sha256=CgrNj2wj9KNALu9YfnGSaHX2fXfTtiim_cfx0CPVoy8,22357 -sqlalchemy/testing/plugin/pytestplugin.py,sha256=acuAWFec8QGzC_AWOhTsRRgB6dttkbNdoyGVb7WvTng,28524 -sqlalchemy/testing/profiling.py,sha256=o8_V3TpF_WytudMQQLm1UxlfNDrLCWxUvkH-Kd0unKU,10472 -sqlalchemy/testing/provision.py,sha256=l9KUBqR8_NLACZzE1f_twzjlkaVUzRa5tBwgJBmvqrY,15122 -sqlalchemy/testing/requirements.py,sha256=CuMwr9ZvQV-3va7jZ_dx8CW2TsjlhcQJ_DdNHvmFpJQ,54650 -sqlalchemy/testing/schema.py,sha256=z2Z5rm3iJ1-vgifUxwzxEjt1qu7QOyr3TeDnQdCHlWE,6737 -sqlalchemy/testing/suite/__init__.py,sha256=YvTEqUNHaBlgLgWDAWn79mQrUR4VBGUHtprywJlmDT8,741 -sqlalchemy/testing/suite/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_cte.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_ddl.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_deprecations.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_dialect.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_insert.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_reflection.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_results.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_rowcount.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_select.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_sequence.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_types.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_unicode_ddl.cpython-312.pyc,, -sqlalchemy/testing/suite/__pycache__/test_update_delete.cpython-312.pyc,, -sqlalchemy/testing/suite/test_cte.py,sha256=C_viXJKClFAm91rtPb42tiAA7gYJwKkqGYVJYap0cLM,6662 -sqlalchemy/testing/suite/test_ddl.py,sha256=k6D6RreLkDSSpRUM2hQz-_CA48qV2PYx_2LNyUSoZzE,12420 -sqlalchemy/testing/suite/test_deprecations.py,sha256=SKRFZDteBO1rw9-BQjDic5nh7fdyw2ypVOewR2pj7-Q,5490 -sqlalchemy/testing/suite/test_dialect.py,sha256=ftOWRXWOotB2_jMJJqwoH9f3X2ucc1HwwOiXp573GwM,23663 -sqlalchemy/testing/suite/test_insert.py,sha256=v3zrUZaGlke3cI4vabHg7xaI4gNqcHhtMPgYuf0mOxc,19454 -sqlalchemy/testing/suite/test_reflection.py,sha256=J--LmZcLez3B21WANzLj2zC_6gcKX-ycBQW2CMQ_azA,112873 -sqlalchemy/testing/suite/test_results.py,sha256=i2a_XiUJ5D9womKtJsveVUBUeJRpOOPrr1fJuen_qEY,17416 -sqlalchemy/testing/suite/test_rowcount.py,sha256=DCEGxorDcrT5JCLd3_SNQeZmxT6sKIcuKxX1r6vK4Mg,8158 -sqlalchemy/testing/suite/test_select.py,sha256=_HTef0mBmcvL2uS9z4G73AXZwXvjerQpl8wF1vqPt4I,63731 -sqlalchemy/testing/suite/test_sequence.py,sha256=sIqkfgVqPIgl4lm75EPdag9gK-rTHfUm3pWX-JijPy4,10240 -sqlalchemy/testing/suite/test_types.py,sha256=6RiN7L1g3b_3GUw8G_i39WB3M0W4tHNYTiwu57thzR0,69946 -sqlalchemy/testing/suite/test_unicode_ddl.py,sha256=juF_KTK1nGrSlsL8z0Ky0rFSNkPGheLB3e0Kq3yRqss,6330 -sqlalchemy/testing/suite/test_update_delete.py,sha256=TnJI5U_ZEuu3bni4sH-S6CENxvSZwDgZL-FKSV45bAo,4133 -sqlalchemy/testing/util.py,sha256=rQqTjYvb9wQt1UfeZZHNuPG3aNxFyjrvVQUhtfZQYjc,15006 -sqlalchemy/testing/warnings.py,sha256=3EhbTlPe4gJnoydj-OKueNOOtGwIRF2kV4XvlFwFYOA,1598 -sqlalchemy/types.py,sha256=unCm_O8qKxU3LjLbqeqSNQSsK5k5R5POsyEx2gH6CF4,3244 -sqlalchemy/util/__init__.py,sha256=3-O9j9qPk-gTx6hlyLsISc_JOW5MhjV0J_L5nV19qI8,8436 -sqlalchemy/util/__pycache__/__init__.cpython-312.pyc,, -sqlalchemy/util/__pycache__/_collections.cpython-312.pyc,, -sqlalchemy/util/__pycache__/_concurrency_py3k.cpython-312.pyc,, -sqlalchemy/util/__pycache__/_has_cy.cpython-312.pyc,, -sqlalchemy/util/__pycache__/_py_collections.cpython-312.pyc,, 
-sqlalchemy/util/__pycache__/compat.cpython-312.pyc,, -sqlalchemy/util/__pycache__/concurrency.cpython-312.pyc,, -sqlalchemy/util/__pycache__/deprecations.cpython-312.pyc,, -sqlalchemy/util/__pycache__/langhelpers.cpython-312.pyc,, -sqlalchemy/util/__pycache__/preloaded.cpython-312.pyc,, -sqlalchemy/util/__pycache__/queue.cpython-312.pyc,, -sqlalchemy/util/__pycache__/tool_support.cpython-312.pyc,, -sqlalchemy/util/__pycache__/topological.cpython-312.pyc,, -sqlalchemy/util/__pycache__/typing.cpython-312.pyc,, -sqlalchemy/util/_collections.py,sha256=602PFadp7l1ZMJdqNTt7gkj-4-Xid0Ioo2xy4o6ICVI,20793 -sqlalchemy/util/_concurrency_py3k.py,sha256=8GB5fJVXvKp8BGyDBmFff3fTXQ71QUzkALiscX8qfYE,9458 -sqlalchemy/util/_has_cy.py,sha256=IHGc5hUFbXQuv1a1z2P8yVwz0yGbCYXyQM2qsdcBTyg,1287 -sqlalchemy/util/_py_collections.py,sha256=2PUqiKIsF8d-gNDAAqYI8WE6XPyRf1flRLkVsJeXuOo,17255 -sqlalchemy/util/compat.py,sha256=cIgpoXoktBdGfkOdQOfczU8siP6QHjhFnOQlMEUENRA,9024 -sqlalchemy/util/concurrency.py,sha256=zlmuK99p5cPpEPxBQYSDfLHP0Pbuw4iDeUzU49Pb1Ow,3412 -sqlalchemy/util/deprecations.py,sha256=AnHpDWHi7g2gv_QUTGStQTnr0J94lIF-3aFLOsv9yzg,12372 -sqlalchemy/util/langhelpers.py,sha256=6-bwfvTw6x1Ev8rsnHFHXYJOZVJvphrv85fQAGtAyn0,67308 -sqlalchemy/util/preloaded.py,sha256=78Sl7VjzTOPajbovvARxNeuZb-iYRpEvL5k8m5Bz4vQ,6054 -sqlalchemy/util/queue.py,sha256=4SbSbVamUECjCDpMPR035N1ooVHt9W5GjbqkxfZmH5k,10507 -sqlalchemy/util/tool_support.py,sha256=DuurikYgDUIIxk3gubUKl6rs-etXt3eeHaZ4ZkIyJXQ,6336 -sqlalchemy/util/topological.py,sha256=_NdtAghZjhZ4e2fwWHmn25erP5cvtGgOUMplsCa_VCE,3578 -sqlalchemy/util/typing.py,sha256=BTBK8Lb_4wm_1Jop0U98ULBk3a2PaN2wA1OegAhv1tA,18811 diff --git a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/WHEEL b/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/WHEEL deleted file mode 100644 index e4b52df..0000000 --- a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.1.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/top_level.txt b/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/top_level.txt deleted file mode 100644 index 39fb2be..0000000 --- a/myenv/Lib/site-packages/SQLAlchemy-2.0.35.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -sqlalchemy diff --git a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/INSTALLER b/myenv/Lib/site-packages/alembic-1.13.3.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/LICENSE b/myenv/Lib/site-packages/alembic-1.13.3.dist-info/LICENSE deleted file mode 100644 index be8de00..0000000 --- a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright 2009-2024 Michael Bayer. - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/METADATA b/myenv/Lib/site-packages/alembic-1.13.3.dist-info/METADATA deleted file mode 100644 index 2754bc0..0000000 --- a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/METADATA +++ /dev/null @@ -1,142 +0,0 @@ -Metadata-Version: 2.1 -Name: alembic -Version: 1.13.3 -Summary: A database migration tool for SQLAlchemy. -Home-page: https://alembic.sqlalchemy.org -Author: Mike Bayer -Author-email: mike_mp@zzzcomputing.com -License: MIT -Project-URL: Documentation, https://alembic.sqlalchemy.org/en/latest/ -Project-URL: Changelog, https://alembic.sqlalchemy.org/en/latest/changelog.html -Project-URL: Source, https://github.com/sqlalchemy/alembic/ -Project-URL: Issue Tracker, https://github.com/sqlalchemy/alembic/issues/ -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Environment :: Console -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Database :: Front-Ends -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: SQLAlchemy >=1.3.0 -Requires-Dist: Mako -Requires-Dist: typing-extensions >=4 -Requires-Dist: importlib-metadata ; python_version < "3.9" -Requires-Dist: importlib-resources ; python_version < "3.9" -Provides-Extra: tz -Requires-Dist: backports.zoneinfo ; (python_version < "3.9") and extra == 'tz' - -Alembic is a database migrations tool written by the author -of `SQLAlchemy `_. A migrations tool -offers the following functionality: - -* Can emit ALTER statements to a database in order to change - the structure of tables and other constructs -* Provides a system whereby "migration scripts" may be constructed; - each script indicates a particular series of steps that can "upgrade" a - target database to a new version, and optionally a series of steps that can - "downgrade" similarly, doing the same steps in reverse. -* Allows the scripts to execute in some sequential manner. - -The goals of Alembic are: - -* Very open ended and transparent configuration and operation. A new - Alembic environment is generated from a set of templates which is selected - among a set of options when setup first occurs. The templates then deposit a - series of scripts that define fully how database connectivity is established - and how migration scripts are invoked; the migration scripts themselves are - generated from a template within that series of scripts. 
The scripts can - then be further customized to define exactly how databases will be - interacted with and what structure new migration files should take. -* Full support for transactional DDL. The default scripts ensure that all - migrations occur within a transaction - for those databases which support - this (Postgresql, Microsoft SQL Server), migrations can be tested with no - need to manually undo changes upon failure. -* Minimalist script construction. Basic operations like renaming - tables/columns, adding/removing columns, changing column attributes can be - performed through one line commands like alter_column(), rename_table(), - add_constraint(). There is no need to recreate full SQLAlchemy Table - structures for simple operations like these - the functions themselves - generate minimalist schema structures behind the scenes to achieve the given - DDL sequence. -* "auto generation" of migrations. While real world migrations are far more - complex than what can be automatically determined, Alembic can still - eliminate the initial grunt work in generating new migration directives - from an altered schema. The ``--autogenerate`` feature will inspect the - current status of a database using SQLAlchemy's schema inspection - capabilities, compare it to the current state of the database model as - specified in Python, and generate a series of "candidate" migrations, - rendering them into a new migration script as Python directives. The - developer then edits the new file, adding additional directives and data - migrations as needed, to produce a finished migration. Table and column - level changes can be detected, with constraints and indexes to follow as - well. -* Full support for migrations generated as SQL scripts. Those of us who - work in corporate environments know that direct access to DDL commands on a - production database is a rare privilege, and DBAs want textual SQL scripts. - Alembic's usage model and commands are oriented towards being able to run a - series of migrations into a textual output file as easily as it runs them - directly to a database. Care must be taken in this mode to not invoke other - operations that rely upon in-memory SELECTs of rows - Alembic tries to - provide helper constructs like bulk_insert() to help with data-oriented - operations that are compatible with script-based DDL. -* Non-linear, dependency-graph versioning. Scripts are given UUID - identifiers similarly to a DVCS, and the linkage of one script to the next - is achieved via human-editable markers within the scripts themselves. - The structure of a set of migration files is considered as a - directed-acyclic graph, meaning any migration file can be dependent - on any other arbitrary set of migration files, or none at - all. Through this open-ended system, migration files can be organized - into branches, multiple roots, and mergepoints, without restriction. - Commands are provided to produce new branches, roots, and merges of - branches automatically. -* Provide a library of ALTER constructs that can be used by any SQLAlchemy - application. The DDL constructs build upon SQLAlchemy's own DDLElement base - and can be used standalone by any application or script. 
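To make the one-line operations named in these bullets concrete, here is a sketch with the shape of a generated migration script. The revision ids, table, column, and constraint names are invented for illustration, and note that the constraint operations in Alembic's public op namespace are spelled ``create_*_constraint`` rather than a literal ``add_constraint()``::

    import sqlalchemy as sa
    from alembic import op

    # revision identifiers, used by Alembic; placeholders here, since they
    # are normally generated by `alembic revision`
    revision = "abc123def456"
    down_revision = None

    def upgrade():
        op.add_column("accounts", sa.Column("last_seen", sa.DateTime()))
        op.alter_column("accounts", "name", new_column_name="full_name")
        op.create_unique_constraint("uq_accounts_email", "accounts", ["email"])

    def downgrade():
        op.drop_constraint("uq_accounts_email", "accounts", type_="unique")
        op.alter_column("accounts", "full_name", new_column_name="name")
        op.drop_column("accounts", "last_seen")

A file like this is usually produced by ``alembic revision --autogenerate -m "..."`` and then edited by hand, as the autogenerate bullet above describes; running ``alembic upgrade head`` applies it directly, while adding ``--sql`` renders the same steps as a textual SQL script, matching the offline mode noted earlier.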
-* At long last, bring SQLite and its inability to ALTER things into the fold, - but in such a way that SQLite's very special workflow needs are accommodated - in an explicit way that makes the most of a bad situation, through the - concept of a "batch" migration, where multiple changes to a table can - be batched together to form a series of instructions for a single, subsequent - "move-and-copy" workflow. You can even use "move-and-copy" workflow for - other databases, if you want to recreate a table in the background - on a busy system. - -Documentation and status of Alembic is at https://alembic.sqlalchemy.org/ - -The SQLAlchemy Project -====================== - -Alembic is part of the `SQLAlchemy Project `_ and -adheres to the same standards and conventions as the core project. - -Development / Bug reporting / Pull requests -___________________________________________ - -Please refer to the -`SQLAlchemy Community Guide `_ for -guidelines on coding and participating in this project. - -Code of Conduct -_______________ - -Above all, SQLAlchemy places great emphasis on polite, thoughtful, and -constructive communication between users and developers. -Please see our current Code of Conduct at -`Code of Conduct `_. - -License -======= - -Alembic is distributed under the `MIT license -`_. diff --git a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/RECORD b/myenv/Lib/site-packages/alembic-1.13.3.dist-info/RECORD deleted file mode 100644 index a860fd1..0000000 --- a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/RECORD +++ /dev/null @@ -1,150 +0,0 @@ -../../Scripts/alembic.exe,sha256=D3oBTY36jraQOWLXX1ragqqTe7jV8hdhgoFht0-aIv4,108444 -alembic-1.13.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -alembic-1.13.3.dist-info/LICENSE,sha256=zhnnuit3ylhLgqZ5KFbhOOswsxHIlrB2wJpAXuRfvuk,1059 -alembic-1.13.3.dist-info/METADATA,sha256=6CGNPkq-FufkoGWO6PUhx64LztmqCUAyhbObbop42yQ,7390 -alembic-1.13.3.dist-info/RECORD,, -alembic-1.13.3.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91 -alembic-1.13.3.dist-info/entry_points.txt,sha256=aykM30soxwGN0pB7etLc1q0cHJbL9dy46RnK9VX4LLw,48 -alembic-1.13.3.dist-info/top_level.txt,sha256=FwKWd5VsPFC8iQjpu1u9Cn-JnK3-V1RhUCmWqz1cl-s,8 -alembic/__init__.py,sha256=IZLF4dqHa7mXm10zXXAxFsXBP7yE27ZxghGv3BhN-Jw,63 -alembic/__main__.py,sha256=373m7-TBh72JqrSMYviGrxCHZo-cnweM8AGF8A22PmY,78 -alembic/__pycache__/__init__.cpython-312.pyc,, -alembic/__pycache__/__main__.cpython-312.pyc,, -alembic/__pycache__/command.cpython-312.pyc,, -alembic/__pycache__/config.cpython-312.pyc,, -alembic/__pycache__/context.cpython-312.pyc,, -alembic/__pycache__/environment.cpython-312.pyc,, -alembic/__pycache__/migration.cpython-312.pyc,, -alembic/__pycache__/op.cpython-312.pyc,, -alembic/autogenerate/__init__.py,sha256=ntmUTXhjLm4_zmqIwyVaECdpPDn6_u1yM9vYk6-553E,543 -alembic/autogenerate/__pycache__/__init__.cpython-312.pyc,, -alembic/autogenerate/__pycache__/api.cpython-312.pyc,, -alembic/autogenerate/__pycache__/compare.cpython-312.pyc,, -alembic/autogenerate/__pycache__/render.cpython-312.pyc,, -alembic/autogenerate/__pycache__/rewriter.cpython-312.pyc,, -alembic/autogenerate/api.py,sha256=Bh-37G0PSFeT9WSfEQ-3TZoainXGLL2nsl4okv_xYc0,22173 -alembic/autogenerate/compare.py,sha256=cdUBH6qsedaJsnToSOu4MfcJaI4bjUJ4VWqtBlqsSr8,44944 -alembic/autogenerate/render.py,sha256=YB3C90rq7XDhjTia9GAnK6yfnVVzCROziZrbArmG9SE,35481 -alembic/autogenerate/rewriter.py,sha256=uZWRkTYJoncoEJ5WY1QBRiozjyChqZDJPy4LtcRibjM,7846 
-alembic/command.py,sha256=2tkKrIoEgPfXrGgvMRGrUXH4l-7z466DOxd7Q2XOfL8,22169 -alembic/config.py,sha256=BZ7mwFRk2gq8GFNxxy9qvMUFx43YbDbQTC99OnjqiKY,22216 -alembic/context.py,sha256=hK1AJOQXJ29Bhn276GYcosxeG7pC5aZRT5E8c4bMJ4Q,195 -alembic/context.pyi,sha256=hUHbSnbSeEEMVkk0gDSXOq4_9edSjYzsjmmf-mL9Iao,31737 -alembic/ddl/__init__.py,sha256=Df8fy4Vn_abP8B7q3x8gyFwEwnLw6hs2Ljt_bV3EZWE,152 -alembic/ddl/__pycache__/__init__.cpython-312.pyc,, -alembic/ddl/__pycache__/_autogen.cpython-312.pyc,, -alembic/ddl/__pycache__/base.cpython-312.pyc,, -alembic/ddl/__pycache__/impl.cpython-312.pyc,, -alembic/ddl/__pycache__/mssql.cpython-312.pyc,, -alembic/ddl/__pycache__/mysql.cpython-312.pyc,, -alembic/ddl/__pycache__/oracle.cpython-312.pyc,, -alembic/ddl/__pycache__/postgresql.cpython-312.pyc,, -alembic/ddl/__pycache__/sqlite.cpython-312.pyc,, -alembic/ddl/_autogen.py,sha256=Blv2RrHNyF4cE6znCQXNXG5T9aO-YmiwD4Fz-qfoaWA,9275 -alembic/ddl/base.py,sha256=fzGvWyvpSluIOKDQ7Ajc-i_jlDpH4j-JZFOOPxxYS-s,9986 -alembic/ddl/impl.py,sha256=VggQMr6aqeVw12Cj4EqJpiETMhbrwIiG22HEJtPcR4s,29067 -alembic/ddl/mssql.py,sha256=ydvgBSaftKYjaBaMyqius66Ta4CICQSj79Og3Ed2atY,14219 -alembic/ddl/mysql.py,sha256=kXOGYmpnL_9WL3ijXNsG4aAwy3m1HWJOoLZSePzmJF0,17316 -alembic/ddl/oracle.py,sha256=669YlkcZihlXFbnXhH2krdrvDry8q5pcUGfoqkg_R6Y,6243 -alembic/ddl/postgresql.py,sha256=GNCnx-N8UsCIstfW49J8ivYcKgRB8KFNPRgNtORC_AM,29883 -alembic/ddl/sqlite.py,sha256=wLXhb8bJWRspKQTb-iVfepR4LXYgOuEbUWKX5qwDhIQ,7570 -alembic/environment.py,sha256=MM5lPayGT04H3aeng1H7GQ8HEAs3VGX5yy6mDLCPLT4,43 -alembic/migration.py,sha256=MV6Fju6rZtn2fTREKzXrCZM6aIBGII4OMZFix0X-GLs,41 -alembic/op.py,sha256=flHtcsVqOD-ZgZKK2pv-CJ5Cwh-KJ7puMUNXzishxLw,167 -alembic/op.pyi,sha256=QZ1ERetxIrpZNTyg48Btn5OJhhpMId-_MLMP36RauOw,50168 -alembic/operations/__init__.py,sha256=e0KQSZAgLpTWvyvreB7DWg7RJV_MWSOPVDgCqsd2FzY,318 -alembic/operations/__pycache__/__init__.cpython-312.pyc,, -alembic/operations/__pycache__/base.cpython-312.pyc,, -alembic/operations/__pycache__/batch.cpython-312.pyc,, -alembic/operations/__pycache__/ops.cpython-312.pyc,, -alembic/operations/__pycache__/schemaobj.cpython-312.pyc,, -alembic/operations/__pycache__/toimpl.cpython-312.pyc,, -alembic/operations/base.py,sha256=JRaOtPqyqfaPjzGHxuP9VMcO1KsJNmbbLOvwG82qxGA,74474 -alembic/operations/batch.py,sha256=YqtD4hJ3_RkFxvI7zbmBwxcLEyLHYyWQpsz4l5L85yI,26943 -alembic/operations/ops.py,sha256=guIpLQzlqgkdP2LGDW8vWg_DXeAouEldiVZDgRas7YI,94953 -alembic/operations/schemaobj.py,sha256=Wp-bBe4a8lXPTvIHJttBY0ejtpVR5Jvtb2kI-U2PztQ,9468 -alembic/operations/toimpl.py,sha256=Fx-UKcq6S8pVtsEwPFjTKtEcAVKjfptn-BfpE1k3_ck,7517 -alembic/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alembic/runtime/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alembic/runtime/__pycache__/__init__.cpython-312.pyc,, -alembic/runtime/__pycache__/environment.cpython-312.pyc,, -alembic/runtime/__pycache__/migration.cpython-312.pyc,, -alembic/runtime/environment.py,sha256=SkYB_am1h3FSG8IsExAQxGP_7WwzOVigqjlO747Aokc,41497 -alembic/runtime/migration.py,sha256=sku7-2_TralZQnNeoDaEFlydTStL-SJechbr9K8AHJs,50093 -alembic/script/__init__.py,sha256=lSj06O391Iy5avWAiq8SPs6N8RBgxkSPjP8wpXcNDGg,100 -alembic/script/__pycache__/__init__.cpython-312.pyc,, -alembic/script/__pycache__/base.cpython-312.pyc,, -alembic/script/__pycache__/revision.cpython-312.pyc,, -alembic/script/__pycache__/write_hooks.cpython-312.pyc,, -alembic/script/base.py,sha256=XLNpdsLnBBSz4ZKMFUArFUdtL1HcjtuUDHNbA-5VlZA,37809 
-alembic/script/revision.py,sha256=NTu-eu5Y78u4NoVXpT0alpD2oL40SGATA2sEMEf1el4,62306 -alembic/script/write_hooks.py,sha256=NGB6NGgfdf7HK6XNNpSKqUCfzxazj-NRUePgFx7MJSM,5036 -alembic/templates/async/README,sha256=ISVtAOvqvKk_5ThM5ioJE-lMkvf9IbknFUFVU_vPma4,58 -alembic/templates/async/__pycache__/env.cpython-312.pyc,, -alembic/templates/async/alembic.ini.mako,sha256=7VfUJqH9tEsydKOOmpnGFTsERHWhs7ghORuASnJb_Co,3632 -alembic/templates/async/env.py,sha256=zbOCf3Y7w2lg92hxSwmG1MM_7y56i_oRH4AKp0pQBYo,2389 -alembic/templates/async/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635 -alembic/templates/generic/README,sha256=MVlc9TYmr57RbhXET6QxgyCcwWP7w-vLkEsirENqiIQ,38 -alembic/templates/generic/__pycache__/env.cpython-312.pyc,, -alembic/templates/generic/alembic.ini.mako,sha256=5wy1rOdDJjHbeEnieycSaZ9tz6AM6hONYk4RiOVXnmk,3740 -alembic/templates/generic/env.py,sha256=TLRWOVW3Xpt_Tpf8JFzlnoPn_qoUu8UV77Y4o9XD6yI,2103 -alembic/templates/generic/script.py.mako,sha256=MEqL-2qATlST9TAOeYgscMn1uy6HUS9NFvDgl93dMj8,635 -alembic/templates/multidb/README,sha256=dWLDhnBgphA4Nzb7sNlMfCS3_06YqVbHhz-9O5JNqyI,606 -alembic/templates/multidb/__pycache__/env.cpython-312.pyc,, -alembic/templates/multidb/alembic.ini.mako,sha256=oThPQgzkg-NOcbsSXIemi-Lg4nib3G6hDHKdejjtJIM,3834 -alembic/templates/multidb/env.py,sha256=6zNjnW8mXGUk7erTsAvrfhvqoczJ-gagjVq1Ypg2YIQ,4230 -alembic/templates/multidb/script.py.mako,sha256=N06nMtNSwHkgl0EBXDyMt8njp9tlOesR583gfq21nbY,1090 -alembic/testing/__init__.py,sha256=kOxOh5nwmui9d-_CCq9WA4Udwy7ITjm453w74CTLZDo,1159 -alembic/testing/__pycache__/__init__.cpython-312.pyc,, -alembic/testing/__pycache__/assertions.cpython-312.pyc,, -alembic/testing/__pycache__/env.cpython-312.pyc,, -alembic/testing/__pycache__/fixtures.cpython-312.pyc,, -alembic/testing/__pycache__/requirements.cpython-312.pyc,, -alembic/testing/__pycache__/schemacompare.cpython-312.pyc,, -alembic/testing/__pycache__/util.cpython-312.pyc,, -alembic/testing/__pycache__/warnings.cpython-312.pyc,, -alembic/testing/assertions.py,sha256=ScUb1sVopIl70BirfHUJDvwswC70Q93CiIWwkiZbhHg,5207 -alembic/testing/env.py,sha256=zJacVb_z6uLs2U1TtkmnFH9P3_F-3IfYbVv4UEPOvfo,10754 -alembic/testing/fixtures.py,sha256=nBntOynOmVCFc7IYiN3DIQ3TBNTfiGCvL_1-FyCry8o,9462 -alembic/testing/plugin/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -alembic/testing/plugin/__pycache__/__init__.cpython-312.pyc,, -alembic/testing/plugin/__pycache__/bootstrap.cpython-312.pyc,, -alembic/testing/plugin/bootstrap.py,sha256=9C6wtjGrIVztZ928w27hsQE0KcjDLIUtUN3dvZKsMVk,50 -alembic/testing/requirements.py,sha256=dKeAO1l5TwBqXarJN-IPORlCqCJv-41Dj6oXoEikxHQ,5133 -alembic/testing/schemacompare.py,sha256=N5UqSNCOJetIKC4vKhpYzQEpj08XkdgIoqBmEPQ3tlc,4838 -alembic/testing/suite/__init__.py,sha256=MvE7-hwbaVN1q3NM-ztGxORU9dnIelUCINKqNxewn7Y,288 -alembic/testing/suite/__pycache__/__init__.cpython-312.pyc,, -alembic/testing/suite/__pycache__/_autogen_fixtures.cpython-312.pyc,, -alembic/testing/suite/__pycache__/test_autogen_comments.cpython-312.pyc,, -alembic/testing/suite/__pycache__/test_autogen_computed.cpython-312.pyc,, -alembic/testing/suite/__pycache__/test_autogen_diffs.cpython-312.pyc,, -alembic/testing/suite/__pycache__/test_autogen_fks.cpython-312.pyc,, -alembic/testing/suite/__pycache__/test_autogen_identity.cpython-312.pyc,, -alembic/testing/suite/__pycache__/test_environment.cpython-312.pyc,, -alembic/testing/suite/__pycache__/test_op.cpython-312.pyc,, 
-alembic/testing/suite/_autogen_fixtures.py,sha256=cDq1pmzHe15S6dZPGNC6sqFaCQ3hLT_oPV2IDigUGQ0,9880 -alembic/testing/suite/test_autogen_comments.py,sha256=aEGqKUDw4kHjnDk298aoGcQvXJWmZXcIX_2FxH4cJK8,6283 -alembic/testing/suite/test_autogen_computed.py,sha256=CXAeF-5Wr2cmW8PB7ztHG_4ZQsn1gSWrHWfxi72grNU,6147 -alembic/testing/suite/test_autogen_diffs.py,sha256=T4SR1n_kmcOKYhR4W1-dA0e5sddJ69DSVL2HW96kAkE,8394 -alembic/testing/suite/test_autogen_fks.py,sha256=AqFmb26Buex167HYa9dZWOk8x-JlB1OK3bwcvvjDFaU,32927 -alembic/testing/suite/test_autogen_identity.py,sha256=kcuqngG7qXAKPJDX4U8sRzPKHEJECHuZ0DtuaS6tVkk,5824 -alembic/testing/suite/test_environment.py,sha256=OwD-kpESdLoc4byBrGrXbZHvqtPbzhFCG4W9hJOJXPQ,11877 -alembic/testing/suite/test_op.py,sha256=2XQCdm_NmnPxHGuGj7hmxMzIhKxXNotUsKdACXzE1mM,1343 -alembic/testing/util.py,sha256=CQrcQDA8fs_7ME85z5ydb-Bt70soIIID-qNY1vbR2dg,3350 -alembic/testing/warnings.py,sha256=RxA7x_8GseANgw07Us8JN_1iGbANxaw6_VitX2ZGQH4,1078 -alembic/util/__init__.py,sha256=KSZ7UT2YzH6CietgUtljVoE3QnGjoFKOi7RL5sgUxrk,1688 -alembic/util/__pycache__/__init__.cpython-312.pyc,, -alembic/util/__pycache__/compat.cpython-312.pyc,, -alembic/util/__pycache__/editor.cpython-312.pyc,, -alembic/util/__pycache__/exc.cpython-312.pyc,, -alembic/util/__pycache__/langhelpers.cpython-312.pyc,, -alembic/util/__pycache__/messaging.cpython-312.pyc,, -alembic/util/__pycache__/pyfiles.cpython-312.pyc,, -alembic/util/__pycache__/sqla_compat.cpython-312.pyc,, -alembic/util/compat.py,sha256=RjHdQa1NomU3Zlvgfvza0OMiSRQSLRL3xVl3OdUy2UE,2594 -alembic/util/editor.py,sha256=JIz6_BdgV8_oKtnheR6DZoB7qnrHrlRgWjx09AsTsUw,2546 -alembic/util/exc.py,sha256=KQTru4zcgAmN4IxLMwLFS56XToUewaXB7oOLcPNjPwg,98 -alembic/util/langhelpers.py,sha256=LpOcovnhMnP45kTt8zNJ4BHpyQrlF40OL6yDXjqKtsE,10026 -alembic/util/messaging.py,sha256=BxAHiJsYHBPb2m8zv4yaueSRAlVuYXWkRCeN02JXhqw,3250 -alembic/util/pyfiles.py,sha256=zltVdcwEJJCPS2gHsQvkHkQakuF6wXiZ6zfwHbGNT0g,3489 -alembic/util/sqla_compat.py,sha256=XMfZaLdbVbJoniNUyI3RUUXu4gCWljjVBbJ7db6vCgc,19526 diff --git a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/WHEEL b/myenv/Lib/site-packages/alembic-1.13.3.dist-info/WHEEL deleted file mode 100644 index dcfdc6e..0000000 --- a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.1.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/entry_points.txt b/myenv/Lib/site-packages/alembic-1.13.3.dist-info/entry_points.txt deleted file mode 100644 index 5945268..0000000 --- a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -alembic = alembic.config:main diff --git a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/top_level.txt b/myenv/Lib/site-packages/alembic-1.13.3.dist-info/top_level.txt deleted file mode 100644 index b5bd98d..0000000 --- a/myenv/Lib/site-packages/alembic-1.13.3.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -alembic diff --git a/myenv/Lib/site-packages/alembic/__init__.py b/myenv/Lib/site-packages/alembic/__init__.py deleted file mode 100644 index acf69a6..0000000 --- a/myenv/Lib/site-packages/alembic/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from . import context -from . 
import op - -__version__ = "1.13.3" diff --git a/myenv/Lib/site-packages/alembic/__main__.py b/myenv/Lib/site-packages/alembic/__main__.py deleted file mode 100644 index af1b8e8..0000000 --- a/myenv/Lib/site-packages/alembic/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .config import main - -if __name__ == "__main__": - main(prog="alembic") diff --git a/myenv/Lib/site-packages/alembic/autogenerate/__init__.py b/myenv/Lib/site-packages/alembic/autogenerate/__init__.py deleted file mode 100644 index 445ddb2..0000000 --- a/myenv/Lib/site-packages/alembic/autogenerate/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -from .api import _render_migration_diffs as _render_migration_diffs -from .api import compare_metadata as compare_metadata -from .api import produce_migrations as produce_migrations -from .api import render_python_code as render_python_code -from .api import RevisionContext as RevisionContext -from .compare import _produce_net_changes as _produce_net_changes -from .compare import comparators as comparators -from .render import render_op_text as render_op_text -from .render import renderers as renderers -from .rewriter import Rewriter as Rewriter diff --git a/myenv/Lib/site-packages/alembic/autogenerate/api.py b/myenv/Lib/site-packages/alembic/autogenerate/api.py deleted file mode 100644 index 4c03916..0000000 --- a/myenv/Lib/site-packages/alembic/autogenerate/api.py +++ /dev/null @@ -1,650 +0,0 @@ -from __future__ import annotations - -import contextlib -from typing import Any -from typing import Dict -from typing import Iterator -from typing import List -from typing import Optional -from typing import Sequence -from typing import Set -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import inspect - -from . import compare -from . import render -from .. import util -from ..operations import ops -from ..util import sqla_compat - -"""Provide the 'autogenerate' feature which can produce migration operations -automatically.""" - -if TYPE_CHECKING: - from sqlalchemy.engine import Connection - from sqlalchemy.engine import Dialect - from sqlalchemy.engine import Inspector - from sqlalchemy.sql.schema import MetaData - from sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.sql.schema import Table - - from ..config import Config - from ..operations.ops import DowngradeOps - from ..operations.ops import MigrationScript - from ..operations.ops import UpgradeOps - from ..runtime.environment import NameFilterParentNames - from ..runtime.environment import NameFilterType - from ..runtime.environment import ProcessRevisionDirectiveFn - from ..runtime.environment import RenderItemFn - from ..runtime.migration import MigrationContext - from ..script.base import Script - from ..script.base import ScriptDirectory - from ..script.revision import _GetRevArg - - -def compare_metadata(context: MigrationContext, metadata: MetaData) -> Any: - """Compare a database schema to that given in a - :class:`~sqlalchemy.schema.MetaData` instance. - - The database connection is presented in the context - of a :class:`.MigrationContext` object, which - provides database connectivity as well as optional - comparison functions to use for datatypes and - server defaults - see the "autogenerate" arguments - at :meth:`.EnvironmentContext.configure` - for details on these. 
- - The return format is a list of "diff" directives, - each representing individual differences:: - - from alembic.migration import MigrationContext - from alembic.autogenerate import compare_metadata - from sqlalchemy import ( - create_engine, - MetaData, - Column, - Integer, - String, - Table, - text, - ) - import pprint - - engine = create_engine("sqlite://") - - with engine.begin() as conn: - conn.execute( - text( - ''' - create table foo ( - id integer not null primary key, - old_data varchar, - x integer - ) - ''' - ) - ) - conn.execute(text("create table bar (data varchar)")) - - metadata = MetaData() - Table( - "foo", - metadata, - Column("id", Integer, primary_key=True), - Column("data", Integer), - Column("x", Integer, nullable=False), - ) - Table("bat", metadata, Column("info", String)) - - mc = MigrationContext.configure(engine.connect()) - - diff = compare_metadata(mc, metadata) - pprint.pprint(diff, indent=2, width=20) - - Output:: - - [ - ( - "add_table", - Table( - "bat", - MetaData(), - Column("info", String(), table=<bat>), - schema=None, - ), - ), - ( - "remove_table", - Table( - "bar", - MetaData(), - Column("data", VARCHAR(), table=<bar>), - schema=None, - ), - ), - ( - "add_column", - None, - "foo", - Column("data", Integer(), table=<foo>), - ), - [ - ( - "modify_nullable", - None, - "foo", - "x", - { - "existing_comment": None, - "existing_server_default": False, - "existing_type": INTEGER(), - }, - True, - False, - ) - ], - ( - "remove_column", - None, - "foo", - Column("old_data", VARCHAR(), table=<foo>), - ), - ] - - :param context: a :class:`.MigrationContext` - instance. - :param metadata: a :class:`~sqlalchemy.schema.MetaData` - instance. - - .. seealso:: - - :func:`.produce_migrations` - produces a :class:`.MigrationScript` - structure based on metadata comparison. - - """ - - migration_script = produce_migrations(context, metadata) - assert migration_script.upgrade_ops is not None - return migration_script.upgrade_ops.as_diffs() - - -def produce_migrations( - context: MigrationContext, metadata: MetaData -) -> MigrationScript: - """Produce a :class:`.MigrationScript` structure based on schema - comparison. - - This function does essentially what :func:`.compare_metadata` does, - but then runs the resulting list of diffs to produce the full - :class:`.MigrationScript` object. For an example of what this looks like, - see the example in :ref:`customizing_revision`. - - .. seealso:: - - :func:`.compare_metadata` - returns more fundamental "diff" - data from comparing a schema. - - """ - - autogen_context = AutogenContext(context, metadata=metadata) - - migration_script = ops.MigrationScript( - rev_id=None, - upgrade_ops=ops.UpgradeOps([]), - downgrade_ops=ops.DowngradeOps([]), - ) - - compare._populate_migration_script(autogen_context, migration_script) - - return migration_script - - -def render_python_code( - up_or_down_op: Union[UpgradeOps, DowngradeOps], - sqlalchemy_module_prefix: str = "sa.", - alembic_module_prefix: str = "op.", - render_as_batch: bool = False, - imports: Sequence[str] = (), - render_item: Optional[RenderItemFn] = None, - migration_context: Optional[MigrationContext] = None, - user_module_prefix: Optional[str] = None, -) -> str: - """Render Python code given an :class:`.UpgradeOps` or - :class:`.DowngradeOps` object. - - This is a convenience function that can be used to test the - autogenerate output of a user-defined :class:`.MigrationScript` structure.
- - :param up_or_down_op: :class:`.UpgradeOps` or :class:`.DowngradeOps` object - :param sqlalchemy_module_prefix: module prefix for SQLAlchemy objects - :param alembic_module_prefix: module prefix for Alembic constructs - :param render_as_batch: use "batch operations" style for rendering - :param imports: sequence of import symbols to add - :param render_item: callable to render items - :param migration_context: optional :class:`.MigrationContext` - :param user_module_prefix: optional string prefix for user-defined types - - .. versionadded:: 1.11.0 - - """ - opts = { - "sqlalchemy_module_prefix": sqlalchemy_module_prefix, - "alembic_module_prefix": alembic_module_prefix, - "render_item": render_item, - "render_as_batch": render_as_batch, - "user_module_prefix": user_module_prefix, - } - - if migration_context is None: - from ..runtime.migration import MigrationContext - from sqlalchemy.engine.default import DefaultDialect - - migration_context = MigrationContext.configure( - dialect=DefaultDialect() - ) - - autogen_context = AutogenContext(migration_context, opts=opts) - autogen_context.imports = set(imports) - return render._indent( - render._render_cmd_body(up_or_down_op, autogen_context) - ) - - -def _render_migration_diffs( - context: MigrationContext, template_args: Dict[Any, Any] -) -> None: - """legacy, used by test_autogen_composition at the moment""" - - autogen_context = AutogenContext(context) - - upgrade_ops = ops.UpgradeOps([]) - compare._produce_net_changes(autogen_context, upgrade_ops) - - migration_script = ops.MigrationScript( - rev_id=None, - upgrade_ops=upgrade_ops, - downgrade_ops=upgrade_ops.reverse(), - ) - - render._render_python_into_templatevars( - autogen_context, migration_script, template_args - ) - - -class AutogenContext: - """Maintains configuration and state that's specific to an - autogenerate operation.""" - - metadata: Optional[MetaData] = None - """The :class:`~sqlalchemy.schema.MetaData` object - representing the destination. - - This object is the one that is passed within ``env.py`` - to the :paramref:`.EnvironmentContext.configure.target_metadata` - parameter. It represents the structure of :class:`.Table` and other - objects as stated in the current database model, and represents the - destination structure for the database being examined. - - While the :class:`~sqlalchemy.schema.MetaData` object is primarily - known as a collection of :class:`~sqlalchemy.schema.Table` objects, - it also has an :attr:`~sqlalchemy.schema.MetaData.info` dictionary - that may be used by end-user schemes to store additional schema-level - objects that are to be compared in custom autogeneration schemes. - - """ - - connection: Optional[Connection] = None - """The :class:`~sqlalchemy.engine.base.Connection` object currently - connected to the database backend being compared. - - This is obtained from the :attr:`.MigrationContext.bind` and is - ultimately set up in the ``env.py`` script. - - """ - - dialect: Optional[Dialect] = None - """The :class:`~sqlalchemy.engine.Dialect` object currently in use. - - This is normally obtained from the - :attr:`~sqlalchemy.engine.base.Connection.dialect` attribute. - - """ - - imports: Set[str] = None # type: ignore[assignment] - """A ``set()`` which contains string Python import directives. - - The directives are to be rendered into the ``${imports}`` section - of a script template. The set is normally empty and can be modified - within hooks such as the - :paramref:`.EnvironmentContext.configure.render_item` hook. - - .. 
seealso:: - - :ref:`autogen_render_types` - - """ - - migration_context: MigrationContext = None # type: ignore[assignment] - """The :class:`.MigrationContext` established by the ``env.py`` script.""" - - def __init__( - self, - migration_context: MigrationContext, - metadata: Optional[MetaData] = None, - opts: Optional[Dict[str, Any]] = None, - autogenerate: bool = True, - ) -> None: - if ( - autogenerate - and migration_context is not None - and migration_context.as_sql - ): - raise util.CommandError( - "autogenerate can't use as_sql=True as it prevents querying " - "the database for schema information" - ) - - if opts is None: - opts = migration_context.opts - - self.metadata = metadata = ( - opts.get("target_metadata", None) if metadata is None else metadata - ) - - if ( - autogenerate - and metadata is None - and migration_context is not None - and migration_context.script is not None - ): - raise util.CommandError( - "Can't proceed with --autogenerate option; environment " - "script %s does not provide " - "a MetaData object or sequence of objects to the context." - % (migration_context.script.env_py_location) - ) - - include_object = opts.get("include_object", None) - include_name = opts.get("include_name", None) - - object_filters = [] - name_filters = [] - if include_object: - object_filters.append(include_object) - if include_name: - name_filters.append(include_name) - - self._object_filters = object_filters - self._name_filters = name_filters - - self.migration_context = migration_context - if self.migration_context is not None: - self.connection = self.migration_context.bind - self.dialect = self.migration_context.dialect - - self.imports = set() - self.opts: Dict[str, Any] = opts - self._has_batch: bool = False - - @util.memoized_property - def inspector(self) -> Inspector: - if self.connection is None: - raise TypeError( - "can't return inspector as this " - "AutogenContext has no database connection" - ) - return inspect(self.connection) - - @contextlib.contextmanager - def _within_batch(self) -> Iterator[None]: - self._has_batch = True - yield - self._has_batch = False - - def run_name_filters( - self, - name: Optional[str], - type_: NameFilterType, - parent_names: NameFilterParentNames, - ) -> bool: - """Run the context's name filters and return True if the targets - should be part of the autogenerate operation. - - This method should be run for every kind of name encountered within the - reflection side of an autogenerate operation, giving the environment - the chance to filter what names should be reflected as database - objects. The filters here are produced directly via the - :paramref:`.EnvironmentContext.configure.include_name` parameter. - - """ - if "schema_name" in parent_names: - if type_ == "table": - table_name = name - else: - table_name = parent_names.get("table_name", None) - if table_name: - schema_name = parent_names["schema_name"] - if schema_name: - parent_names["schema_qualified_table_name"] = "%s.%s" % ( - schema_name, - table_name, - ) - else: - parent_names["schema_qualified_table_name"] = table_name - - for fn in self._name_filters: - if not fn(name, type_, parent_names): - return False - else: - return True - - def run_object_filters( - self, - object_: SchemaItem, - name: sqla_compat._ConstraintName, - type_: NameFilterType, - reflected: bool, - compare_to: Optional[SchemaItem], - ) -> bool: - """Run the context's object filters and return True if the targets - should be part of the autogenerate operation. 
- - This method should be run for every kind of object encountered within - an autogenerate operation, giving the environment the chance - to filter what objects should be included in the comparison. - The filters here are produced directly via the - :paramref:`.EnvironmentContext.configure.include_object` parameter. - - """ - for fn in self._object_filters: - if not fn(object_, name, type_, reflected, compare_to): - return False - else: - return True - - run_filters = run_object_filters - - @util.memoized_property - def sorted_tables(self) -> List[Table]: - """Return an aggregate of the :attr:`.MetaData.sorted_tables` - collection(s). - - For a sequence of :class:`.MetaData` objects, this - concatenates the :attr:`.MetaData.sorted_tables` collection - for each individual :class:`.MetaData` in the order of the - sequence. It does **not** collate the sorted tables collections. - - """ - result = [] - for m in util.to_list(self.metadata): - result.extend(m.sorted_tables) - return result - - @util.memoized_property - def table_key_to_table(self) -> Dict[str, Table]: - """Return an aggregate of the :attr:`.MetaData.tables` dictionaries. - - The :attr:`.MetaData.tables` collection is a dictionary of table key - to :class:`.Table`; this method aggregates the dictionary across - multiple :class:`.MetaData` objects into one dictionary. - - Duplicate table keys are **not** supported; if two :class:`.MetaData` - objects contain the same table key, an exception is raised. - - """ - result: Dict[str, Table] = {} - for m in util.to_list(self.metadata): - intersect = set(result).intersection(set(m.tables)) - if intersect: - raise ValueError( - "Duplicate table keys across multiple " - "MetaData objects: %s" - % (", ".join('"%s"' % key for key in sorted(intersect))) - ) - - result.update(m.tables) - return result - - -class RevisionContext: - """Maintains configuration and state that's specific to a revision - file generation operation.""" - - generated_revisions: List[MigrationScript] - process_revision_directives: Optional[ProcessRevisionDirectiveFn] - - def __init__( - self, - config: Config, - script_directory: ScriptDirectory, - command_args: Dict[str, Any], - process_revision_directives: Optional[ - ProcessRevisionDirectiveFn - ] = None, - ) -> None: - self.config = config - self.script_directory = script_directory - self.command_args = command_args - self.process_revision_directives = process_revision_directives - self.template_args = { - "config": config # Let templates use config for - # e.g. 
multiple databases - } - self.generated_revisions = [self._default_revision()] - - def _to_script( - self, migration_script: MigrationScript - ) -> Optional[Script]: - template_args: Dict[str, Any] = self.template_args.copy() - - if getattr(migration_script, "_needs_render", False): - autogen_context = self._last_autogen_context - - # clear out existing imports if we are doing multiple - # renders - autogen_context.imports = set() - if migration_script.imports: - autogen_context.imports.update(migration_script.imports) - render._render_python_into_templatevars( - autogen_context, migration_script, template_args - ) - - assert migration_script.rev_id is not None - return self.script_directory.generate_revision( - migration_script.rev_id, - migration_script.message, - refresh=True, - head=migration_script.head, - splice=migration_script.splice, - branch_labels=migration_script.branch_label, - version_path=migration_script.version_path, - depends_on=migration_script.depends_on, - **template_args, - ) - - def run_autogenerate( - self, rev: _GetRevArg, migration_context: MigrationContext - ) -> None: - self._run_environment(rev, migration_context, True) - - def run_no_autogenerate( - self, rev: _GetRevArg, migration_context: MigrationContext - ) -> None: - self._run_environment(rev, migration_context, False) - - def _run_environment( - self, - rev: _GetRevArg, - migration_context: MigrationContext, - autogenerate: bool, - ) -> None: - if autogenerate: - if self.command_args["sql"]: - raise util.CommandError( - "Using --sql with --autogenerate does not make any sense" - ) - if set(self.script_directory.get_revisions(rev)) != set( - self.script_directory.get_revisions("heads") - ): - raise util.CommandError("Target database is not up to date.") - - upgrade_token = migration_context.opts["upgrade_token"] - downgrade_token = migration_context.opts["downgrade_token"] - - migration_script = self.generated_revisions[-1] - if not getattr(migration_script, "_needs_render", False): - migration_script.upgrade_ops_list[-1].upgrade_token = upgrade_token - migration_script.downgrade_ops_list[-1].downgrade_token = ( - downgrade_token - ) - migration_script._needs_render = True - else: - migration_script._upgrade_ops.append( - ops.UpgradeOps([], upgrade_token=upgrade_token) - ) - migration_script._downgrade_ops.append( - ops.DowngradeOps([], downgrade_token=downgrade_token) - ) - - autogen_context = AutogenContext( - migration_context, autogenerate=autogenerate - ) - self._last_autogen_context: AutogenContext = autogen_context - - if autogenerate: - compare._populate_migration_script( - autogen_context, migration_script - ) - - if self.process_revision_directives: - self.process_revision_directives( - migration_context, rev, self.generated_revisions - ) - - hook = migration_context.opts["process_revision_directives"] - if hook: - hook(migration_context, rev, self.generated_revisions) - - for migration_script in self.generated_revisions: - migration_script._needs_render = True - - def _default_revision(self) -> MigrationScript: - command_args: Dict[str, Any] = self.command_args - op = ops.MigrationScript( - rev_id=command_args["rev_id"] or util.rev_id(), - message=command_args["message"], - upgrade_ops=ops.UpgradeOps([]), - downgrade_ops=ops.DowngradeOps([]), - head=command_args["head"], - splice=command_args["splice"], - branch_label=command_args["branch_label"], - version_path=command_args["version_path"], - depends_on=command_args["depends_on"], - ) - return op - - def generate_scripts(self) -> 
Iterator[Optional[Script]]: - for generated_revision in self.generated_revisions: - yield self._to_script(generated_revision) diff --git a/myenv/Lib/site-packages/alembic/autogenerate/compare.py b/myenv/Lib/site-packages/alembic/autogenerate/compare.py deleted file mode 100644 index 0d98519..0000000 --- a/myenv/Lib/site-packages/alembic/autogenerate/compare.py +++ /dev/null @@ -1,1329 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import contextlib -import logging -import re -from typing import Any -from typing import cast -from typing import Dict -from typing import Iterator -from typing import Mapping -from typing import Optional -from typing import Set -from typing import Tuple -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -from sqlalchemy import event -from sqlalchemy import inspect -from sqlalchemy import schema as sa_schema -from sqlalchemy import text -from sqlalchemy import types as sqltypes -from sqlalchemy.sql import expression -from sqlalchemy.sql.schema import ForeignKeyConstraint -from sqlalchemy.sql.schema import Index -from sqlalchemy.sql.schema import UniqueConstraint -from sqlalchemy.util import OrderedSet - -from .. import util -from ..ddl._autogen import is_index_sig -from ..ddl._autogen import is_uq_sig -from ..operations import ops -from ..util import sqla_compat - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy.engine.reflection import Inspector - from sqlalchemy.sql.elements import quoted_name - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Table - - from alembic.autogenerate.api import AutogenContext - from alembic.ddl.impl import DefaultImpl - from alembic.operations.ops import AlterColumnOp - from alembic.operations.ops import MigrationScript - from alembic.operations.ops import ModifyTableOps - from alembic.operations.ops import UpgradeOps - from ..ddl._autogen import _constraint_sig - - -log = logging.getLogger(__name__) - - -def _populate_migration_script( - autogen_context: AutogenContext, migration_script: MigrationScript -) -> None: - upgrade_ops = migration_script.upgrade_ops_list[-1] - downgrade_ops = migration_script.downgrade_ops_list[-1] - - _produce_net_changes(autogen_context, upgrade_ops) - upgrade_ops.reverse_into(downgrade_ops) - - -comparators = util.Dispatcher(uselist=True) - - -def _produce_net_changes( - autogen_context: AutogenContext, upgrade_ops: UpgradeOps -) -> None: - connection = autogen_context.connection - assert connection is not None - include_schemas = autogen_context.opts.get("include_schemas", False) - - inspector: Inspector = inspect(connection) - - default_schema = connection.dialect.default_schema_name - schemas: Set[Optional[str]] - if include_schemas: - schemas = set(inspector.get_schema_names()) - # replace default schema name with None - schemas.discard("information_schema") - # replace the "default" schema with None - schemas.discard(default_schema) - schemas.add(None) - else: - schemas = {None} - - schemas = { - s for s in schemas if autogen_context.run_name_filters(s, "schema", {}) - } - - assert autogen_context.dialect is not None - comparators.dispatch("schema", autogen_context.dialect.name)( - autogen_context, upgrade_ops, schemas - ) - - -@comparators.dispatch_for("schema") -def _autogen_for_tables( - autogen_context: AutogenContext, - upgrade_ops: 
UpgradeOps, - schemas: Union[Set[None], Set[Optional[str]]], -) -> None: - inspector = autogen_context.inspector - - conn_table_names: Set[Tuple[Optional[str], str]] = set() - - version_table_schema = ( - autogen_context.migration_context.version_table_schema - ) - version_table = autogen_context.migration_context.version_table - - for schema_name in schemas: - tables = set(inspector.get_table_names(schema=schema_name)) - if schema_name == version_table_schema: - tables = tables.difference( - [autogen_context.migration_context.version_table] - ) - - conn_table_names.update( - (schema_name, tname) - for tname in tables - if autogen_context.run_name_filters( - tname, "table", {"schema_name": schema_name} - ) - ) - - metadata_table_names = OrderedSet( - [(table.schema, table.name) for table in autogen_context.sorted_tables] - ).difference([(version_table_schema, version_table)]) - - _compare_tables( - conn_table_names, - metadata_table_names, - inspector, - upgrade_ops, - autogen_context, - ) - - -def _compare_tables( - conn_table_names: set, - metadata_table_names: set, - inspector: Inspector, - upgrade_ops: UpgradeOps, - autogen_context: AutogenContext, -) -> None: - default_schema = inspector.bind.dialect.default_schema_name - - # tables coming from the connection will not have "schema" - # set if it matches default_schema_name; so we need a list - # of table names from local metadata that also have "None" if schema - # == default_schema_name. Most setups will be like this anyway but - # some are not (see #170) - metadata_table_names_no_dflt_schema = OrderedSet( - [ - (schema if schema != default_schema else None, tname) - for schema, tname in metadata_table_names - ] - ) - - # to adjust for the MetaData collection storing the tables either - # as "schemaname.tablename" or just "tablename", create a new lookup - # which will match the "non-default-schema" keys to the Table object. - tname_to_table = { - no_dflt_schema: autogen_context.table_key_to_table[ - sa_schema._get_table_key(tname, schema) - ] - for no_dflt_schema, (schema, tname) in zip( - metadata_table_names_no_dflt_schema, metadata_table_names - ) - } - metadata_table_names = metadata_table_names_no_dflt_schema - - for s, tname in metadata_table_names.difference(conn_table_names): - name = "%s.%s" % (s, tname) if s else tname - metadata_table = tname_to_table[(s, tname)] - if autogen_context.run_object_filters( - metadata_table, tname, "table", False, None - ): - upgrade_ops.ops.append( - ops.CreateTableOp.from_table(metadata_table) - ) - log.info("Detected added table %r", name) - modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) - - comparators.dispatch("table")( - autogen_context, - modify_table_ops, - s, - tname, - None, - metadata_table, - ) - if not modify_table_ops.is_empty(): - upgrade_ops.ops.append(modify_table_ops) - - removal_metadata = sa_schema.MetaData() - for s, tname in conn_table_names.difference(metadata_table_names): - name = sa_schema._get_table_key(tname, s) - exists = name in removal_metadata.tables - t = sa_schema.Table(tname, removal_metadata, schema=s) - - if not exists: - event.listen( - t, - "column_reflect", - # fmt: off - autogen_context.migration_context.impl. 
- _compat_autogen_column_reflect - (inspector), - # fmt: on - ) - sqla_compat._reflect_table(inspector, t) - if autogen_context.run_object_filters(t, tname, "table", True, None): - modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) - - comparators.dispatch("table")( - autogen_context, modify_table_ops, s, tname, t, None - ) - if not modify_table_ops.is_empty(): - upgrade_ops.ops.append(modify_table_ops) - - upgrade_ops.ops.append(ops.DropTableOp.from_table(t)) - log.info("Detected removed table %r", name) - - existing_tables = conn_table_names.intersection(metadata_table_names) - - existing_metadata = sa_schema.MetaData() - conn_column_info = {} - for s, tname in existing_tables: - name = sa_schema._get_table_key(tname, s) - exists = name in existing_metadata.tables - t = sa_schema.Table(tname, existing_metadata, schema=s) - if not exists: - event.listen( - t, - "column_reflect", - # fmt: off - autogen_context.migration_context.impl. - _compat_autogen_column_reflect(inspector), - # fmt: on - ) - sqla_compat._reflect_table(inspector, t) - conn_column_info[(s, tname)] = t - - for s, tname in sorted(existing_tables, key=lambda x: (x[0] or "", x[1])): - s = s or None - name = "%s.%s" % (s, tname) if s else tname - metadata_table = tname_to_table[(s, tname)] - conn_table = existing_metadata.tables[name] - - if autogen_context.run_object_filters( - metadata_table, tname, "table", False, conn_table - ): - modify_table_ops = ops.ModifyTableOps(tname, [], schema=s) - with _compare_columns( - s, - tname, - conn_table, - metadata_table, - modify_table_ops, - autogen_context, - inspector, - ): - comparators.dispatch("table")( - autogen_context, - modify_table_ops, - s, - tname, - conn_table, - metadata_table, - ) - - if not modify_table_ops.is_empty(): - upgrade_ops.ops.append(modify_table_ops) - - -_IndexColumnSortingOps: Mapping[str, Any] = util.immutabledict( - { - "asc": expression.asc, - "desc": expression.desc, - "nulls_first": expression.nullsfirst, - "nulls_last": expression.nullslast, - "nullsfirst": expression.nullsfirst, # 1_3 name - "nullslast": expression.nullslast, # 1_3 name - } -) - - -def _make_index( - impl: DefaultImpl, params: Dict[str, Any], conn_table: Table -) -> Optional[Index]: - exprs: list[Union[Column[Any], TextClause]] = [] - sorting = params.get("column_sorting") - - for num, col_name in enumerate(params["column_names"]): - item: Union[Column[Any], TextClause] - if col_name is None: - assert "expressions" in params - name = params["expressions"][num] - item = text(name) - else: - name = col_name - item = conn_table.c[col_name] - if sorting and name in sorting: - for operator in sorting[name]: - if operator in _IndexColumnSortingOps: - item = _IndexColumnSortingOps[operator](item) - exprs.append(item) - ix = sa_schema.Index( - params["name"], - *exprs, - unique=params["unique"], - _table=conn_table, - **impl.adjust_reflected_dialect_options(params, "index"), - ) - if "duplicates_constraint" in params: - ix.info["duplicates_constraint"] = params["duplicates_constraint"] - return ix - - -def _make_unique_constraint( - impl: DefaultImpl, params: Dict[str, Any], conn_table: Table -) -> UniqueConstraint: - uq = sa_schema.UniqueConstraint( - *[conn_table.c[cname] for cname in params["column_names"]], - name=params["name"], - **impl.adjust_reflected_dialect_options(params, "unique_constraint"), - ) - if "duplicates_index" in params: - uq.info["duplicates_index"] = params["duplicates_index"] - - return uq - - -def _make_foreign_key( - params: Dict[str, Any], conn_table: 
Table -) -> ForeignKeyConstraint: - tname = params["referred_table"] - if params["referred_schema"]: - tname = "%s.%s" % (params["referred_schema"], tname) - - options = params.get("options", {}) - - const = sa_schema.ForeignKeyConstraint( - [conn_table.c[cname] for cname in params["constrained_columns"]], - ["%s.%s" % (tname, n) for n in params["referred_columns"]], - onupdate=options.get("onupdate"), - ondelete=options.get("ondelete"), - deferrable=options.get("deferrable"), - initially=options.get("initially"), - name=params["name"], - ) - # needed by 0.7 - conn_table.append_constraint(const) - return const - - -@contextlib.contextmanager -def _compare_columns( - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Table, - metadata_table: Table, - modify_table_ops: ModifyTableOps, - autogen_context: AutogenContext, - inspector: Inspector, -) -> Iterator[None]: - name = "%s.%s" % (schema, tname) if schema else tname - metadata_col_names = OrderedSet( - c.name for c in metadata_table.c if not c.system - ) - metadata_cols_by_name = { - c.name: c for c in metadata_table.c if not c.system - } - - conn_col_names = { - c.name: c - for c in conn_table.c - if autogen_context.run_name_filters( - c.name, "column", {"table_name": tname, "schema_name": schema} - ) - } - - for cname in metadata_col_names.difference(conn_col_names): - if autogen_context.run_object_filters( - metadata_cols_by_name[cname], cname, "column", False, None - ): - modify_table_ops.ops.append( - ops.AddColumnOp.from_column_and_tablename( - schema, tname, metadata_cols_by_name[cname] - ) - ) - log.info("Detected added column '%s.%s'", name, cname) - - for colname in metadata_col_names.intersection(conn_col_names): - metadata_col = metadata_cols_by_name[colname] - conn_col = conn_table.c[colname] - if not autogen_context.run_object_filters( - metadata_col, colname, "column", False, conn_col - ): - continue - alter_column_op = ops.AlterColumnOp(tname, colname, schema=schema) - - comparators.dispatch("column")( - autogen_context, - alter_column_op, - schema, - tname, - colname, - conn_col, - metadata_col, - ) - - if alter_column_op.has_changes(): - modify_table_ops.ops.append(alter_column_op) - - yield - - for cname in set(conn_col_names).difference(metadata_col_names): - if autogen_context.run_object_filters( - conn_table.c[cname], cname, "column", True, None - ): - modify_table_ops.ops.append( - ops.DropColumnOp.from_column_and_tablename( - schema, tname, conn_table.c[cname] - ) - ) - log.info("Detected removed column '%s.%s'", name, cname) - - -_C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index]) - - -@comparators.dispatch_for("table") -def _compare_indexes_and_uniques( - autogen_context: AutogenContext, - modify_ops: ModifyTableOps, - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Optional[Table], - metadata_table: Optional[Table], -) -> None: - inspector = autogen_context.inspector - is_create_table = conn_table is None - is_drop_table = metadata_table is None - impl = autogen_context.migration_context.impl - - # 1a. get raw indexes and unique constraints from metadata ... 
- if metadata_table is not None: - metadata_unique_constraints = { - uq - for uq in metadata_table.constraints - if isinstance(uq, sa_schema.UniqueConstraint) - } - metadata_indexes = set(metadata_table.indexes) - else: - metadata_unique_constraints = set() - metadata_indexes = set() - - conn_uniques = conn_indexes = frozenset() # type:ignore[var-annotated] - - supports_unique_constraints = False - - unique_constraints_duplicate_unique_indexes = False - - if conn_table is not None: - # 1b. ... and from connection, if the table exists - try: - conn_uniques = inspector.get_unique_constraints( # type:ignore[assignment] # noqa - tname, schema=schema - ) - supports_unique_constraints = True - except NotImplementedError: - pass - except TypeError: - # number of arguments is off for the base - # method in SQLAlchemy due to the cache decorator - # not being present - pass - else: - conn_uniques = [ # type:ignore[assignment] - uq - for uq in conn_uniques - if autogen_context.run_name_filters( - uq["name"], - "unique_constraint", - {"table_name": tname, "schema_name": schema}, - ) - ] - for uq in conn_uniques: - if uq.get("duplicates_index"): - unique_constraints_duplicate_unique_indexes = True - try: - conn_indexes = inspector.get_indexes( # type:ignore[assignment] - tname, schema=schema - ) - except NotImplementedError: - pass - else: - conn_indexes = [ # type:ignore[assignment] - ix - for ix in conn_indexes - if autogen_context.run_name_filters( - ix["name"], - "index", - {"table_name": tname, "schema_name": schema}, - ) - ] - - # 2. convert conn-level objects from raw inspector records - # into schema objects - if is_drop_table: - # for DROP TABLE uniques are inline, don't need them - conn_uniques = set() # type:ignore[assignment] - else: - conn_uniques = { # type:ignore[assignment] - _make_unique_constraint(impl, uq_def, conn_table) - for uq_def in conn_uniques - } - - conn_indexes = { # type:ignore[assignment] - index - for index in ( - _make_index(impl, ix, conn_table) for ix in conn_indexes - ) - if index is not None - } - - # 2a. if the dialect dupes unique indexes as unique constraints - # (mysql and oracle), correct for that - - if unique_constraints_duplicate_unique_indexes: - _correct_for_uq_duplicates_uix( - conn_uniques, - conn_indexes, - metadata_unique_constraints, - metadata_indexes, - autogen_context.dialect, - impl, - ) - - # 3. give the dialect a chance to omit indexes and constraints that - # we know are either added implicitly by the DB or that the DB - # can't accurately report on - impl.correct_for_autogen_constraints( - conn_uniques, # type: ignore[arg-type] - conn_indexes, # type: ignore[arg-type] - metadata_unique_constraints, - metadata_indexes, - ) - - # 4. organize the constraints into "signature" collections, the - # _constraint_sig() objects provide a consistent facade over both - # Index and UniqueConstraint so we can easily work with them - # interchangeably - metadata_unique_constraints_sig = { - impl._create_metadata_constraint_sig(uq) - for uq in metadata_unique_constraints - } - - metadata_indexes_sig = { - impl._create_metadata_constraint_sig(ix) for ix in metadata_indexes - } - - conn_unique_constraints = { - impl._create_reflected_constraint_sig(uq) for uq in conn_uniques - } - - conn_indexes_sig = { - impl._create_reflected_constraint_sig(ix) for ix in conn_indexes - } - - # 5. 
index things by name, for those objects that have names - metadata_names = { - cast(str, c.md_name_to_sql_name(autogen_context)): c - for c in metadata_unique_constraints_sig.union(metadata_indexes_sig) - if c.is_named - } - - conn_uniques_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig] - conn_indexes_by_name: Dict[sqla_compat._ConstraintName, _constraint_sig] - - conn_uniques_by_name = {c.name: c for c in conn_unique_constraints} - conn_indexes_by_name = {c.name: c for c in conn_indexes_sig} - conn_names = { - c.name: c - for c in conn_unique_constraints.union(conn_indexes_sig) - if sqla_compat.constraint_name_string(c.name) - } - - doubled_constraints = { - name: (conn_uniques_by_name[name], conn_indexes_by_name[name]) - for name in set(conn_uniques_by_name).intersection( - conn_indexes_by_name - ) - } - - # 6. index things by "column signature", to help with unnamed unique - # constraints. - conn_uniques_by_sig = {uq.unnamed: uq for uq in conn_unique_constraints} - metadata_uniques_by_sig = { - uq.unnamed: uq for uq in metadata_unique_constraints_sig - } - unnamed_metadata_uniques = { - uq.unnamed: uq - for uq in metadata_unique_constraints_sig - if not sqla_compat._constraint_is_named( - uq.const, autogen_context.dialect - ) - } - - # assumptions: - # 1. a unique constraint or an index from the connection *always* - # has a name. - # 2. an index on the metadata side *always* has a name. - # 3. a unique constraint on the metadata side *might* have a name. - # 4. The backend may double up indexes as unique constraints and - # vice versa (e.g. MySQL, Postgresql) - - def obj_added(obj: _constraint_sig): - if is_index_sig(obj): - if autogen_context.run_object_filters( - obj.const, obj.name, "index", False, None - ): - modify_ops.ops.append(ops.CreateIndexOp.from_index(obj.const)) - log.info( - "Detected added index '%r' on '%s'", - obj.name, - obj.column_names, - ) - elif is_uq_sig(obj): - if not supports_unique_constraints: - # can't report unique indexes as added if we don't - # detect them - return - if is_create_table or is_drop_table: - # unique constraints are created inline with table defs - return - if autogen_context.run_object_filters( - obj.const, obj.name, "unique_constraint", False, None - ): - modify_ops.ops.append( - ops.AddConstraintOp.from_constraint(obj.const) - ) - log.info( - "Detected added unique constraint %r on '%s'", - obj.name, - obj.column_names, - ) - else: - assert False - - def obj_removed(obj: _constraint_sig): - if is_index_sig(obj): - if obj.is_unique and not supports_unique_constraints: - # many databases double up unique constraints - # as unique indexes. 
without that list we can't - # be sure what we're doing here - return - - if autogen_context.run_object_filters( - obj.const, obj.name, "index", True, None - ): - modify_ops.ops.append(ops.DropIndexOp.from_index(obj.const)) - log.info("Detected removed index %r on %r", obj.name, tname) - elif is_uq_sig(obj): - if is_create_table or is_drop_table: - # if the whole table is being dropped, we don't need to - # consider unique constraint separately - return - if autogen_context.run_object_filters( - obj.const, obj.name, "unique_constraint", True, None - ): - modify_ops.ops.append( - ops.DropConstraintOp.from_constraint(obj.const) - ) - log.info( - "Detected removed unique constraint %r on %r", - obj.name, - tname, - ) - else: - assert False - - def obj_changed( - old: _constraint_sig, - new: _constraint_sig, - msg: str, - ): - if is_index_sig(old): - assert is_index_sig(new) - - if autogen_context.run_object_filters( - new.const, new.name, "index", False, old.const - ): - log.info( - "Detected changed index %r on %r: %s", old.name, tname, msg - ) - modify_ops.ops.append(ops.DropIndexOp.from_index(old.const)) - modify_ops.ops.append(ops.CreateIndexOp.from_index(new.const)) - elif is_uq_sig(old): - assert is_uq_sig(new) - - if autogen_context.run_object_filters( - new.const, new.name, "unique_constraint", False, old.const - ): - log.info( - "Detected changed unique constraint %r on %r: %s", - old.name, - tname, - msg, - ) - modify_ops.ops.append( - ops.DropConstraintOp.from_constraint(old.const) - ) - modify_ops.ops.append( - ops.AddConstraintOp.from_constraint(new.const) - ) - else: - assert False - - for removed_name in sorted(set(conn_names).difference(metadata_names)): - conn_obj = conn_names[removed_name] - if ( - is_uq_sig(conn_obj) - and conn_obj.unnamed in unnamed_metadata_uniques - ): - continue - elif removed_name in doubled_constraints: - conn_uq, conn_idx = doubled_constraints[removed_name] - if ( - all( - conn_idx.unnamed != meta_idx.unnamed - for meta_idx in metadata_indexes_sig - ) - and conn_uq.unnamed not in metadata_uniques_by_sig - ): - obj_removed(conn_uq) - obj_removed(conn_idx) - else: - obj_removed(conn_obj) - - for existing_name in sorted(set(metadata_names).intersection(conn_names)): - metadata_obj = metadata_names[existing_name] - - if existing_name in doubled_constraints: - conn_uq, conn_idx = doubled_constraints[existing_name] - if is_index_sig(metadata_obj): - conn_obj = conn_idx - else: - conn_obj = conn_uq - else: - conn_obj = conn_names[existing_name] - - if type(conn_obj) != type(metadata_obj): - obj_removed(conn_obj) - obj_added(metadata_obj) - else: - comparison = metadata_obj.compare_to_reflected(conn_obj) - - if comparison.is_different: - # constraint are different - obj_changed(conn_obj, metadata_obj, comparison.message) - elif comparison.is_skip: - # constraint cannot be compared, skip them - thing = ( - "index" if is_index_sig(conn_obj) else "unique constraint" - ) - log.info( - "Cannot compare %s %r, assuming equal and skipping. 
%s", - thing, - conn_obj.name, - comparison.message, - ) - else: - # constraint are equal - assert comparison.is_equal - - for added_name in sorted(set(metadata_names).difference(conn_names)): - obj = metadata_names[added_name] - obj_added(obj) - - for uq_sig in unnamed_metadata_uniques: - if uq_sig not in conn_uniques_by_sig: - obj_added(unnamed_metadata_uniques[uq_sig]) - - -def _correct_for_uq_duplicates_uix( - conn_unique_constraints, - conn_indexes, - metadata_unique_constraints, - metadata_indexes, - dialect, - impl, -): - # dedupe unique indexes vs. constraints, since MySQL / Oracle - # doesn't really have unique constraints as a separate construct. - # but look in the metadata and try to maintain constructs - # that already seem to be defined one way or the other - # on that side. This logic was formerly local to MySQL dialect, - # generalized to Oracle and others. See #276 - - # resolve final rendered name for unique constraints defined in the - # metadata. this includes truncation of long names. naming convention - # names currently should already be set as cons.name, however leave this - # to the sqla_compat to decide. - metadata_cons_names = [ - (sqla_compat._get_constraint_final_name(cons, dialect), cons) - for cons in metadata_unique_constraints - ] - - metadata_uq_names = { - name for name, cons in metadata_cons_names if name is not None - } - - unnamed_metadata_uqs = { - impl._create_metadata_constraint_sig(cons).unnamed - for name, cons in metadata_cons_names - if name is None - } - - metadata_ix_names = { - sqla_compat._get_constraint_final_name(cons, dialect) - for cons in metadata_indexes - if cons.unique - } - - # for reflection side, names are in their final database form - # already since they're from the database - conn_ix_names = {cons.name: cons for cons in conn_indexes if cons.unique} - - uqs_dupe_indexes = { - cons.name: cons - for cons in conn_unique_constraints - if cons.info["duplicates_index"] - } - - for overlap in uqs_dupe_indexes: - if overlap not in metadata_uq_names: - if ( - impl._create_reflected_constraint_sig( - uqs_dupe_indexes[overlap] - ).unnamed - not in unnamed_metadata_uqs - ): - conn_unique_constraints.discard(uqs_dupe_indexes[overlap]) - elif overlap not in metadata_ix_names: - conn_indexes.discard(conn_ix_names[overlap]) - - -@comparators.dispatch_for("column") -def _compare_nullable( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: Union[quoted_name, str], - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - metadata_col_nullable = metadata_col.nullable - conn_col_nullable = conn_col.nullable - alter_column_op.existing_nullable = conn_col_nullable - - if conn_col_nullable is not metadata_col_nullable: - if ( - sqla_compat._server_default_is_computed( - metadata_col.server_default, conn_col.server_default - ) - and sqla_compat._nullability_might_be_unset(metadata_col) - or ( - sqla_compat._server_default_is_identity( - metadata_col.server_default, conn_col.server_default - ) - ) - ): - log.info( - "Ignoring nullable change on identity column '%s.%s'", - tname, - cname, - ) - else: - alter_column_op.modify_nullable = metadata_col_nullable - log.info( - "Detected %s on column '%s.%s'", - "NULL" if metadata_col_nullable else "NOT NULL", - tname, - cname, - ) - - -@comparators.dispatch_for("column") -def _setup_autoincrement( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: 
Union[quoted_name, str], - cname: quoted_name, - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - if metadata_col.table._autoincrement_column is metadata_col: - alter_column_op.kw["autoincrement"] = True - elif metadata_col.autoincrement is True: - alter_column_op.kw["autoincrement"] = True - elif metadata_col.autoincrement is False: - alter_column_op.kw["autoincrement"] = False - - -@comparators.dispatch_for("column") -def _compare_type( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: Union[quoted_name, str], - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - conn_type = conn_col.type - alter_column_op.existing_type = conn_type - metadata_type = metadata_col.type - if conn_type._type_affinity is sqltypes.NullType: - log.info( - "Couldn't determine database type " "for column '%s.%s'", - tname, - cname, - ) - return - if metadata_type._type_affinity is sqltypes.NullType: - log.info( - "Column '%s.%s' has no type within " "the model; can't compare", - tname, - cname, - ) - return - - isdiff = autogen_context.migration_context._compare_type( - conn_col, metadata_col - ) - - if isdiff: - alter_column_op.modify_type = metadata_type - log.info( - "Detected type change from %r to %r on '%s.%s'", - conn_type, - metadata_type, - tname, - cname, - ) - - -def _render_server_default_for_compare( - metadata_default: Optional[Any], autogen_context: AutogenContext -) -> Optional[str]: - if isinstance(metadata_default, sa_schema.DefaultClause): - if isinstance(metadata_default.arg, str): - metadata_default = metadata_default.arg - else: - metadata_default = str( - metadata_default.arg.compile( - dialect=autogen_context.dialect, - compile_kwargs={"literal_binds": True}, - ) - ) - if isinstance(metadata_default, str): - return metadata_default - else: - return None - - -def _normalize_computed_default(sqltext: str) -> str: - """we want to warn if a computed sql expression has changed. however - we don't want false positives and the warning is not that critical. - so filter out most forms of variability from the SQL text. - - """ - - return re.sub(r"[ \(\)'\"`\[\]\t\r\n]", "", sqltext).lower() - - -def _compare_computed_default( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: str, - cname: str, - conn_col: Column[Any], - metadata_col: Column[Any], -) -> None: - rendered_metadata_default = str( - cast(sa_schema.Computed, metadata_col.server_default).sqltext.compile( - dialect=autogen_context.dialect, - compile_kwargs={"literal_binds": True}, - ) - ) - - # since we cannot change computed columns, we do only a crude comparison - # here where we try to eliminate syntactical differences in order to - # get a minimal comparison just to emit a warning. 
- - rendered_metadata_default = _normalize_computed_default( - rendered_metadata_default - ) - - if isinstance(conn_col.server_default, sa_schema.Computed): - rendered_conn_default = str( - conn_col.server_default.sqltext.compile( - dialect=autogen_context.dialect, - compile_kwargs={"literal_binds": True}, - ) - ) - if rendered_conn_default is None: - rendered_conn_default = "" - else: - rendered_conn_default = _normalize_computed_default( - rendered_conn_default - ) - else: - rendered_conn_default = "" - - if rendered_metadata_default != rendered_conn_default: - _warn_computed_not_supported(tname, cname) - - -def _warn_computed_not_supported(tname: str, cname: str) -> None: - util.warn("Computed default on %s.%s cannot be modified" % (tname, cname)) - - -def _compare_identity_default( - autogen_context, - alter_column_op, - schema, - tname, - cname, - conn_col, - metadata_col, -): - impl = autogen_context.migration_context.impl - diff, ignored_attr, is_alter = impl._compare_identity_default( - metadata_col.server_default, conn_col.server_default - ) - - return diff, is_alter - - -@comparators.dispatch_for("column") -def _compare_server_default( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: Union[quoted_name, str], - conn_col: Column[Any], - metadata_col: Column[Any], -) -> Optional[bool]: - metadata_default = metadata_col.server_default - conn_col_default = conn_col.server_default - if conn_col_default is None and metadata_default is None: - return False - - if sqla_compat._server_default_is_computed(metadata_default): - # return False in case of a computed column as the server - # default. Note that DDL for adding or removing "GENERATED AS" from - # an existing column is not currently known for any backend. - # Once SQLAlchemy can reflect "GENERATED" as the "computed" element, - # we would also want to ignore and/or warn for changes vs. the - # metadata (or support backend specific DDL if applicable). 
- if not sqla_compat.has_computed_reflection: - return False - - else: - return ( - _compare_computed_default( # type:ignore[func-returns-value] - autogen_context, - alter_column_op, - schema, - tname, - cname, - conn_col, - metadata_col, - ) - ) - if sqla_compat._server_default_is_computed(conn_col_default): - _warn_computed_not_supported(tname, cname) - return False - - if sqla_compat._server_default_is_identity( - metadata_default, conn_col_default - ): - alter_column_op.existing_server_default = conn_col_default - diff, is_alter = _compare_identity_default( - autogen_context, - alter_column_op, - schema, - tname, - cname, - conn_col, - metadata_col, - ) - if is_alter: - alter_column_op.modify_server_default = metadata_default - if diff: - log.info( - "Detected server default on column '%s.%s': " - "identity options attributes %s", - tname, - cname, - sorted(diff), - ) - else: - rendered_metadata_default = _render_server_default_for_compare( - metadata_default, autogen_context - ) - - rendered_conn_default = ( - cast(Any, conn_col_default).arg.text if conn_col_default else None - ) - - alter_column_op.existing_server_default = conn_col_default - - is_diff = autogen_context.migration_context._compare_server_default( - conn_col, - metadata_col, - rendered_metadata_default, - rendered_conn_default, - ) - if is_diff: - alter_column_op.modify_server_default = metadata_default - log.info("Detected server default on column '%s.%s'", tname, cname) - - return None - - -@comparators.dispatch_for("column") -def _compare_column_comment( - autogen_context: AutogenContext, - alter_column_op: AlterColumnOp, - schema: Optional[str], - tname: Union[quoted_name, str], - cname: quoted_name, - conn_col: Column[Any], - metadata_col: Column[Any], -) -> Optional[Literal[False]]: - assert autogen_context.dialect is not None - if not autogen_context.dialect.supports_comments: - return None - - metadata_comment = metadata_col.comment - conn_col_comment = conn_col.comment - if conn_col_comment is None and metadata_comment is None: - return False - - alter_column_op.existing_comment = conn_col_comment - - if conn_col_comment != metadata_comment: - alter_column_op.modify_comment = metadata_comment - log.info("Detected column comment '%s.%s'", tname, cname) - - return None - - -@comparators.dispatch_for("table") -def _compare_foreign_keys( - autogen_context: AutogenContext, - modify_table_ops: ModifyTableOps, - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Table, - metadata_table: Table, -) -> None: - # if we're doing CREATE TABLE, all FKs are created - # inline within the table def - if conn_table is None or metadata_table is None: - return - - inspector = autogen_context.inspector - metadata_fks = { - fk - for fk in metadata_table.constraints - if isinstance(fk, sa_schema.ForeignKeyConstraint) - } - - conn_fks_list = [ - fk - for fk in inspector.get_foreign_keys(tname, schema=schema) - if autogen_context.run_name_filters( - fk["name"], - "foreign_key_constraint", - {"table_name": tname, "schema_name": schema}, - ) - ] - - conn_fks = { - _make_foreign_key(const, conn_table) # type: ignore[arg-type] - for const in conn_fks_list - } - - impl = autogen_context.migration_context.impl - - # give the dialect a chance to correct the FKs to match more - # closely - autogen_context.migration_context.impl.correct_for_autogen_foreignkeys( - conn_fks, metadata_fks - ) - - metadata_fks_sig = { - impl._create_metadata_constraint_sig(fk) for fk in metadata_fks - } - - conn_fks_sig = { - 
impl._create_reflected_constraint_sig(fk) for fk in conn_fks - } - - # check if reflected FKs include options, indicating the backend - # can reflect FK options - if conn_fks_list and "options" in conn_fks_list[0]: - conn_fks_by_sig = {c.unnamed: c for c in conn_fks_sig} - metadata_fks_by_sig = {c.unnamed: c for c in metadata_fks_sig} - else: - # otherwise compare by sig without options added - conn_fks_by_sig = {c.unnamed_no_options: c for c in conn_fks_sig} - metadata_fks_by_sig = { - c.unnamed_no_options: c for c in metadata_fks_sig - } - - metadata_fks_by_name = { - c.name: c for c in metadata_fks_sig if c.name is not None - } - conn_fks_by_name = {c.name: c for c in conn_fks_sig if c.name is not None} - - def _add_fk(obj, compare_to): - if autogen_context.run_object_filters( - obj.const, obj.name, "foreign_key_constraint", False, compare_to - ): - modify_table_ops.ops.append( - ops.CreateForeignKeyOp.from_constraint(const.const) # type: ignore[has-type] # noqa: E501 - ) - - log.info( - "Detected added foreign key (%s)(%s) on table %s%s", - ", ".join(obj.source_columns), - ", ".join(obj.target_columns), - "%s." % obj.source_schema if obj.source_schema else "", - obj.source_table, - ) - - def _remove_fk(obj, compare_to): - if autogen_context.run_object_filters( - obj.const, obj.name, "foreign_key_constraint", True, compare_to - ): - modify_table_ops.ops.append( - ops.DropConstraintOp.from_constraint(obj.const) - ) - log.info( - "Detected removed foreign key (%s)(%s) on table %s%s", - ", ".join(obj.source_columns), - ", ".join(obj.target_columns), - "%s." % obj.source_schema if obj.source_schema else "", - obj.source_table, - ) - - # so far it appears we don't need to do this by name at all. - # SQLite doesn't preserve constraint names anyway - - for removed_sig in set(conn_fks_by_sig).difference(metadata_fks_by_sig): - const = conn_fks_by_sig[removed_sig] - if removed_sig not in metadata_fks_by_sig: - compare_to = ( - metadata_fks_by_name[const.name].const - if const.name in metadata_fks_by_name - else None - ) - _remove_fk(const, compare_to) - - for added_sig in set(metadata_fks_by_sig).difference(conn_fks_by_sig): - const = metadata_fks_by_sig[added_sig] - if added_sig not in conn_fks_by_sig: - compare_to = ( - conn_fks_by_name[const.name].const - if const.name in conn_fks_by_name - else None - ) - _add_fk(const, compare_to) - - -@comparators.dispatch_for("table") -def _compare_table_comment( - autogen_context: AutogenContext, - modify_table_ops: ModifyTableOps, - schema: Optional[str], - tname: Union[quoted_name, str], - conn_table: Optional[Table], - metadata_table: Optional[Table], -) -> None: - assert autogen_context.dialect is not None - if not autogen_context.dialect.supports_comments: - return - - # if we're doing CREATE TABLE, comments will be created inline - # with the create_table op. 
- if conn_table is None or metadata_table is None: - return - - if conn_table.comment is None and metadata_table.comment is None: - return - - if metadata_table.comment is None and conn_table.comment is not None: - modify_table_ops.ops.append( - ops.DropTableCommentOp( - tname, existing_comment=conn_table.comment, schema=schema - ) - ) - elif metadata_table.comment != conn_table.comment: - modify_table_ops.ops.append( - ops.CreateTableCommentOp( - tname, - metadata_table.comment, - existing_comment=conn_table.comment, - schema=schema, - ) - ) diff --git a/myenv/Lib/site-packages/alembic/autogenerate/render.py b/myenv/Lib/site-packages/alembic/autogenerate/render.py deleted file mode 100644 index 38bdbfc..0000000 --- a/myenv/Lib/site-packages/alembic/autogenerate/render.py +++ /dev/null @@ -1,1118 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -from io import StringIO -import re -from typing import Any -from typing import cast -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import TYPE_CHECKING -from typing import Union - -from mako.pygen import PythonPrinter -from sqlalchemy import schema as sa_schema -from sqlalchemy import sql -from sqlalchemy import types as sqltypes -from sqlalchemy.sql.elements import conv -from sqlalchemy.sql.elements import quoted_name - -from .. import util -from ..operations import ops -from ..util import sqla_compat - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy.sql.base import DialectKWArgs - from sqlalchemy.sql.elements import ColumnElement - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.schema import CheckConstraint - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Constraint - from sqlalchemy.sql.schema import FetchedValue - from sqlalchemy.sql.schema import ForeignKey - from sqlalchemy.sql.schema import ForeignKeyConstraint - from sqlalchemy.sql.schema import Index - from sqlalchemy.sql.schema import MetaData - from sqlalchemy.sql.schema import PrimaryKeyConstraint - from sqlalchemy.sql.schema import UniqueConstraint - from sqlalchemy.sql.sqltypes import ARRAY - from sqlalchemy.sql.type_api import TypeEngine - - from alembic.autogenerate.api import AutogenContext - from alembic.config import Config - from alembic.operations.ops import MigrationScript - from alembic.operations.ops import ModifyTableOps - from alembic.util.sqla_compat import Computed - from alembic.util.sqla_compat import Identity - - -MAX_PYTHON_ARGS = 255 - - -def _render_gen_name( - autogen_context: AutogenContext, - name: sqla_compat._ConstraintName, -) -> Optional[Union[quoted_name, str, _f_name]]: - if isinstance(name, conv): - return _f_name(_alembic_autogenerate_prefix(autogen_context), name) - else: - return sqla_compat.constraint_name_or_none(name) - - -def _indent(text: str) -> str: - text = re.compile(r"^", re.M).sub(" ", text).strip() - text = re.compile(r" +$", re.M).sub("", text) - return text - - -def _render_python_into_templatevars( - autogen_context: AutogenContext, - migration_script: MigrationScript, - template_args: Dict[str, Union[str, Config]], -) -> None: - imports = autogen_context.imports - - for upgrade_ops, downgrade_ops in zip( - migration_script.upgrade_ops_list, migration_script.downgrade_ops_list - ): - template_args[upgrade_ops.upgrade_token] = _indent( - _render_cmd_body(upgrade_ops, 
autogen_context) - ) - template_args[downgrade_ops.downgrade_token] = _indent( - _render_cmd_body(downgrade_ops, autogen_context) - ) - template_args["imports"] = "\n".join(sorted(imports)) - - -default_renderers = renderers = util.Dispatcher() - - -def _render_cmd_body( - op_container: ops.OpContainer, - autogen_context: AutogenContext, -) -> str: - buf = StringIO() - printer = PythonPrinter(buf) - - printer.writeline( - "# ### commands auto generated by Alembic - please adjust! ###" - ) - - has_lines = False - for op in op_container.ops: - lines = render_op(autogen_context, op) - has_lines = has_lines or bool(lines) - - for line in lines: - printer.writeline(line) - - if not has_lines: - printer.writeline("pass") - - printer.writeline("# ### end Alembic commands ###") - - return buf.getvalue() - - -def render_op( - autogen_context: AutogenContext, op: ops.MigrateOperation -) -> List[str]: - renderer = renderers.dispatch(op) - lines = util.to_list(renderer(autogen_context, op)) - return lines - - -def render_op_text( - autogen_context: AutogenContext, op: ops.MigrateOperation -) -> str: - return "\n".join(render_op(autogen_context, op)) - - -@renderers.dispatch_for(ops.ModifyTableOps) -def _render_modify_table( - autogen_context: AutogenContext, op: ModifyTableOps -) -> List[str]: - opts = autogen_context.opts - render_as_batch = opts.get("render_as_batch", False) - - if op.ops: - lines = [] - if render_as_batch: - with autogen_context._within_batch(): - lines.append( - "with op.batch_alter_table(%r, schema=%r) as batch_op:" - % (op.table_name, op.schema) - ) - for t_op in op.ops: - t_lines = render_op(autogen_context, t_op) - lines.extend(t_lines) - lines.append("") - else: - for t_op in op.ops: - t_lines = render_op(autogen_context, t_op) - lines.extend(t_lines) - - return lines - else: - return [] - - -@renderers.dispatch_for(ops.CreateTableCommentOp) -def _render_create_table_comment( - autogen_context: AutogenContext, op: ops.CreateTableCommentOp -) -> str: - if autogen_context._has_batch: - templ = ( - "{prefix}create_table_comment(\n" - "{indent}{comment},\n" - "{indent}existing_comment={existing}\n" - ")" - ) - else: - templ = ( - "{prefix}create_table_comment(\n" - "{indent}'{tname}',\n" - "{indent}{comment},\n" - "{indent}existing_comment={existing},\n" - "{indent}schema={schema}\n" - ")" - ) - return templ.format( - prefix=_alembic_autogenerate_prefix(autogen_context), - tname=op.table_name, - comment="%r" % op.comment if op.comment is not None else None, - existing=( - "%r" % op.existing_comment - if op.existing_comment is not None - else None - ), - schema="'%s'" % op.schema if op.schema is not None else None, - indent=" ", - ) - - -@renderers.dispatch_for(ops.DropTableCommentOp) -def _render_drop_table_comment( - autogen_context: AutogenContext, op: ops.DropTableCommentOp -) -> str: - if autogen_context._has_batch: - templ = ( - "{prefix}drop_table_comment(\n" - "{indent}existing_comment={existing}\n" - ")" - ) - else: - templ = ( - "{prefix}drop_table_comment(\n" - "{indent}'{tname}',\n" - "{indent}existing_comment={existing},\n" - "{indent}schema={schema}\n" - ")" - ) - return templ.format( - prefix=_alembic_autogenerate_prefix(autogen_context), - tname=op.table_name, - existing=( - "%r" % op.existing_comment - if op.existing_comment is not None - else None - ), - schema="'%s'" % op.schema if op.schema is not None else None, - indent=" ", - ) - - -@renderers.dispatch_for(ops.CreateTableOp) -def _add_table(autogen_context: AutogenContext, op: ops.CreateTableOp) -> str: - 
table = op.to_table() - - args = [ - col - for col in [ - _render_column(col, autogen_context) for col in table.columns - ] - if col - ] + sorted( - [ - rcons - for rcons in [ - _render_constraint( - cons, autogen_context, op._namespace_metadata - ) - for cons in table.constraints - ] - if rcons is not None - ] - ) - - if len(args) > MAX_PYTHON_ARGS: - args_str = "*[" + ",\n".join(args) + "]" - else: - args_str = ",\n".join(args) - - text = "%(prefix)screate_table(%(tablename)r,\n%(args)s" % { - "tablename": _ident(op.table_name), - "prefix": _alembic_autogenerate_prefix(autogen_context), - "args": args_str, - } - if op.schema: - text += ",\nschema=%r" % _ident(op.schema) - - comment = table.comment - if comment: - text += ",\ncomment=%r" % _ident(comment) - - info = table.info - if info: - text += f",\ninfo={info!r}" - - for k in sorted(op.kw): - text += ",\n%s=%r" % (k.replace(" ", "_"), op.kw[k]) - - if table._prefixes: - prefixes = ", ".join("'%s'" % p for p in table._prefixes) - text += ",\nprefixes=[%s]" % prefixes - - if op.if_not_exists is not None: - text += ",\nif_not_exists=%r" % bool(op.if_not_exists) - - text += "\n)" - return text - - -@renderers.dispatch_for(ops.DropTableOp) -def _drop_table(autogen_context: AutogenContext, op: ops.DropTableOp) -> str: - text = "%(prefix)sdrop_table(%(tname)r" % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "tname": _ident(op.table_name), - } - if op.schema: - text += ", schema=%r" % _ident(op.schema) - - if op.if_exists is not None: - text += ", if_exists=%r" % bool(op.if_exists) - - text += ")" - return text - - -def _render_dialect_kwargs_items( - autogen_context: AutogenContext, item: DialectKWArgs -) -> list[str]: - return [ - f"{key}={_render_potential_expr(val, autogen_context)}" - for key, val in item.dialect_kwargs.items() - ] - - -@renderers.dispatch_for(ops.CreateIndexOp) -def _add_index(autogen_context: AutogenContext, op: ops.CreateIndexOp) -> str: - index = op.to_index() - - has_batch = autogen_context._has_batch - - if has_batch: - tmpl = ( - "%(prefix)screate_index(%(name)r, [%(columns)s], " - "unique=%(unique)r%(kwargs)s)" - ) - else: - tmpl = ( - "%(prefix)screate_index(%(name)r, %(table)r, [%(columns)s], " - "unique=%(unique)r%(schema)s%(kwargs)s)" - ) - - assert index.table is not None - - opts = _render_dialect_kwargs_items(autogen_context, index) - if op.if_not_exists is not None: - opts.append("if_not_exists=%r" % bool(op.if_not_exists)) - text = tmpl % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "name": _render_gen_name(autogen_context, index.name), - "table": _ident(index.table.name), - "columns": ", ".join( - _get_index_rendered_expressions(index, autogen_context) - ), - "unique": index.unique or False, - "schema": ( - (", schema=%r" % _ident(index.table.schema)) - if index.table.schema - else "" - ), - "kwargs": ", " + ", ".join(opts) if opts else "", - } - return text - - -@renderers.dispatch_for(ops.DropIndexOp) -def _drop_index(autogen_context: AutogenContext, op: ops.DropIndexOp) -> str: - index = op.to_index() - - has_batch = autogen_context._has_batch - - if has_batch: - tmpl = "%(prefix)sdrop_index(%(name)r%(kwargs)s)" - else: - tmpl = ( - "%(prefix)sdrop_index(%(name)r, " - "table_name=%(table_name)r%(schema)s%(kwargs)s)" - ) - opts = _render_dialect_kwargs_items(autogen_context, index) - if op.if_exists is not None: - opts.append("if_exists=%r" % bool(op.if_exists)) - text = tmpl % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "name": 
_render_gen_name(autogen_context, op.index_name), - "table_name": _ident(op.table_name), - "schema": ((", schema=%r" % _ident(op.schema)) if op.schema else ""), - "kwargs": ", " + ", ".join(opts) if opts else "", - } - return text - - -@renderers.dispatch_for(ops.CreateUniqueConstraintOp) -def _add_unique_constraint( - autogen_context: AutogenContext, op: ops.CreateUniqueConstraintOp -) -> List[str]: - return [_uq_constraint(op.to_constraint(), autogen_context, True)] - - -@renderers.dispatch_for(ops.CreateForeignKeyOp) -def _add_fk_constraint( - autogen_context: AutogenContext, op: ops.CreateForeignKeyOp -) -> str: - args = [repr(_render_gen_name(autogen_context, op.constraint_name))] - if not autogen_context._has_batch: - args.append(repr(_ident(op.source_table))) - - args.extend( - [ - repr(_ident(op.referent_table)), - repr([_ident(col) for col in op.local_cols]), - repr([_ident(col) for col in op.remote_cols]), - ] - ) - kwargs = [ - "referent_schema", - "onupdate", - "ondelete", - "initially", - "deferrable", - "use_alter", - "match", - ] - if not autogen_context._has_batch: - kwargs.insert(0, "source_schema") - - for k in kwargs: - if k in op.kw: - value = op.kw[k] - if value is not None: - args.append("%s=%r" % (k, value)) - - return "%(prefix)screate_foreign_key(%(args)s)" % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "args": ", ".join(args), - } - - -@renderers.dispatch_for(ops.CreatePrimaryKeyOp) -def _add_pk_constraint(constraint, autogen_context): - raise NotImplementedError() - - -@renderers.dispatch_for(ops.CreateCheckConstraintOp) -def _add_check_constraint(constraint, autogen_context): - raise NotImplementedError() - - -@renderers.dispatch_for(ops.DropConstraintOp) -def _drop_constraint( - autogen_context: AutogenContext, op: ops.DropConstraintOp -) -> str: - prefix = _alembic_autogenerate_prefix(autogen_context) - name = _render_gen_name(autogen_context, op.constraint_name) - schema = _ident(op.schema) if op.schema else None - type_ = _ident(op.constraint_type) if op.constraint_type else None - - params_strs = [] - params_strs.append(repr(name)) - if not autogen_context._has_batch: - params_strs.append(repr(_ident(op.table_name))) - if schema is not None: - params_strs.append(f"schema={schema!r}") - if type_ is not None: - params_strs.append(f"type_={type_!r}") - - return f"{prefix}drop_constraint({', '.join(params_strs)})" - - -@renderers.dispatch_for(ops.AddColumnOp) -def _add_column(autogen_context: AutogenContext, op: ops.AddColumnOp) -> str: - schema, tname, column = op.schema, op.table_name, op.column - if autogen_context._has_batch: - template = "%(prefix)sadd_column(%(column)s)" - else: - template = "%(prefix)sadd_column(%(tname)r, %(column)s" - if schema: - template += ", schema=%(schema)r" - template += ")" - text = template % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "tname": tname, - "column": _render_column(column, autogen_context), - "schema": schema, - } - return text - - -@renderers.dispatch_for(ops.DropColumnOp) -def _drop_column(autogen_context: AutogenContext, op: ops.DropColumnOp) -> str: - schema, tname, column_name = op.schema, op.table_name, op.column_name - - if autogen_context._has_batch: - template = "%(prefix)sdrop_column(%(cname)r)" - else: - template = "%(prefix)sdrop_column(%(tname)r, %(cname)r" - if schema: - template += ", schema=%(schema)r" - template += ")" - - text = template % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "tname": _ident(tname), - "cname": _ident(column_name), - 
"schema": _ident(schema), - } - return text - - -@renderers.dispatch_for(ops.AlterColumnOp) -def _alter_column( - autogen_context: AutogenContext, op: ops.AlterColumnOp -) -> str: - tname = op.table_name - cname = op.column_name - server_default = op.modify_server_default - type_ = op.modify_type - nullable = op.modify_nullable - comment = op.modify_comment - autoincrement = op.kw.get("autoincrement", None) - existing_type = op.existing_type - existing_nullable = op.existing_nullable - existing_comment = op.existing_comment - existing_server_default = op.existing_server_default - schema = op.schema - - indent = " " * 11 - - if autogen_context._has_batch: - template = "%(prefix)salter_column(%(cname)r" - else: - template = "%(prefix)salter_column(%(tname)r, %(cname)r" - - text = template % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "tname": tname, - "cname": cname, - } - if existing_type is not None: - text += ",\n%sexisting_type=%s" % ( - indent, - _repr_type(existing_type, autogen_context), - ) - if server_default is not False: - rendered = _render_server_default(server_default, autogen_context) - text += ",\n%sserver_default=%s" % (indent, rendered) - - if type_ is not None: - text += ",\n%stype_=%s" % (indent, _repr_type(type_, autogen_context)) - if nullable is not None: - text += ",\n%snullable=%r" % (indent, nullable) - if comment is not False: - text += ",\n%scomment=%r" % (indent, comment) - if existing_comment is not None: - text += ",\n%sexisting_comment=%r" % (indent, existing_comment) - if nullable is None and existing_nullable is not None: - text += ",\n%sexisting_nullable=%r" % (indent, existing_nullable) - if autoincrement is not None: - text += ",\n%sautoincrement=%r" % (indent, autoincrement) - if server_default is False and existing_server_default: - rendered = _render_server_default( - existing_server_default, autogen_context - ) - text += ",\n%sexisting_server_default=%s" % (indent, rendered) - if schema and not autogen_context._has_batch: - text += ",\n%sschema=%r" % (indent, schema) - text += ")" - return text - - -class _f_name: - def __init__(self, prefix: str, name: conv) -> None: - self.prefix = prefix - self.name = name - - def __repr__(self) -> str: - return "%sf(%r)" % (self.prefix, _ident(self.name)) - - -def _ident(name: Optional[Union[quoted_name, str]]) -> Optional[str]: - """produce a __repr__() object for a string identifier that may - use quoted_name() in SQLAlchemy 0.9 and greater. - - The issue worked around here is that quoted_name() doesn't have - very good repr() behavior by itself when unicode is involved. 
- - """ - if name is None: - return name - elif isinstance(name, quoted_name): - return str(name) - elif isinstance(name, str): - return name - - -def _render_potential_expr( - value: Any, - autogen_context: AutogenContext, - *, - wrap_in_text: bool = True, - is_server_default: bool = False, - is_index: bool = False, -) -> str: - if isinstance(value, sql.ClauseElement): - if wrap_in_text: - template = "%(prefix)stext(%(sql)r)" - else: - template = "%(sql)r" - - return template % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "sql": autogen_context.migration_context.impl.render_ddl_sql_expr( - value, is_server_default=is_server_default, is_index=is_index - ), - } - - else: - return repr(value) - - -def _get_index_rendered_expressions( - idx: Index, autogen_context: AutogenContext -) -> List[str]: - return [ - ( - repr(_ident(getattr(exp, "name", None))) - if isinstance(exp, sa_schema.Column) - else _render_potential_expr(exp, autogen_context, is_index=True) - ) - for exp in idx.expressions - ] - - -def _uq_constraint( - constraint: UniqueConstraint, - autogen_context: AutogenContext, - alter: bool, -) -> str: - opts: List[Tuple[str, Any]] = [] - - has_batch = autogen_context._has_batch - - if constraint.deferrable: - opts.append(("deferrable", str(constraint.deferrable))) - if constraint.initially: - opts.append(("initially", str(constraint.initially))) - if not has_batch and alter and constraint.table.schema: - opts.append(("schema", _ident(constraint.table.schema))) - if not alter and constraint.name: - opts.append( - ("name", _render_gen_name(autogen_context, constraint.name)) - ) - dialect_options = _render_dialect_kwargs_items(autogen_context, constraint) - - if alter: - args = [repr(_render_gen_name(autogen_context, constraint.name))] - if not has_batch: - args += [repr(_ident(constraint.table.name))] - args.append(repr([_ident(col.name) for col in constraint.columns])) - args.extend(["%s=%r" % (k, v) for k, v in opts]) - args.extend(dialect_options) - return "%(prefix)screate_unique_constraint(%(args)s)" % { - "prefix": _alembic_autogenerate_prefix(autogen_context), - "args": ", ".join(args), - } - else: - args = [repr(_ident(col.name)) for col in constraint.columns] - args.extend(["%s=%r" % (k, v) for k, v in opts]) - args.extend(dialect_options) - return "%(prefix)sUniqueConstraint(%(args)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "args": ", ".join(args), - } - - -def _user_autogenerate_prefix(autogen_context, target): - prefix = autogen_context.opts["user_module_prefix"] - if prefix is None: - return "%s." % target.__module__ - else: - return prefix - - -def _sqlalchemy_autogenerate_prefix(autogen_context: AutogenContext) -> str: - return autogen_context.opts["sqlalchemy_module_prefix"] or "" - - -def _alembic_autogenerate_prefix(autogen_context: AutogenContext) -> str: - if autogen_context._has_batch: - return "batch_op." 
- else: - return autogen_context.opts["alembic_module_prefix"] or "" - - -def _user_defined_render( - type_: str, object_: Any, autogen_context: AutogenContext -) -> Union[str, Literal[False]]: - if "render_item" in autogen_context.opts: - render = autogen_context.opts["render_item"] - if render: - rendered = render(type_, object_, autogen_context) - if rendered is not False: - return rendered - return False - - -def _render_column( - column: Column[Any], autogen_context: AutogenContext -) -> str: - rendered = _user_defined_render("column", column, autogen_context) - if rendered is not False: - return rendered - - args: List[str] = [] - opts: List[Tuple[str, Any]] = [] - - if column.server_default: - rendered = _render_server_default( # type:ignore[assignment] - column.server_default, autogen_context - ) - if rendered: - if _should_render_server_default_positionally( - column.server_default - ): - args.append(rendered) - else: - opts.append(("server_default", rendered)) - - if ( - column.autoincrement is not None - and column.autoincrement != sqla_compat.AUTOINCREMENT_DEFAULT - ): - opts.append(("autoincrement", column.autoincrement)) - - if column.nullable is not None: - opts.append(("nullable", column.nullable)) - - if column.system: - opts.append(("system", column.system)) - - comment = column.comment - if comment: - opts.append(("comment", "%r" % comment)) - - # TODO: for non-ascii colname, assign a "key" - return "%(prefix)sColumn(%(name)r, %(type)s, %(args)s%(kwargs)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "name": _ident(column.name), - "type": _repr_type(column.type, autogen_context), - "args": ", ".join([str(arg) for arg in args]) + ", " if args else "", - "kwargs": ( - ", ".join( - ["%s=%s" % (kwname, val) for kwname, val in opts] - + [ - "%s=%s" - % (key, _render_potential_expr(val, autogen_context)) - for key, val in sqla_compat._column_kwargs(column).items() - ] - ) - ), - } - - -def _should_render_server_default_positionally(server_default: Any) -> bool: - return sqla_compat._server_default_is_computed( - server_default - ) or sqla_compat._server_default_is_identity(server_default) - - -def _render_server_default( - default: Optional[ - Union[FetchedValue, str, TextClause, ColumnElement[Any]] - ], - autogen_context: AutogenContext, - repr_: bool = True, -) -> Optional[str]: - rendered = _user_defined_render("server_default", default, autogen_context) - if rendered is not False: - return rendered - - if sqla_compat._server_default_is_computed(default): - return _render_computed(cast("Computed", default), autogen_context) - elif sqla_compat._server_default_is_identity(default): - return _render_identity(cast("Identity", default), autogen_context) - elif isinstance(default, sa_schema.DefaultClause): - if isinstance(default.arg, str): - default = default.arg - else: - return _render_potential_expr( - default.arg, autogen_context, is_server_default=True - ) - - if isinstance(default, str) and repr_: - default = repr(re.sub(r"^'|'$", "", default)) - - return cast(str, default) - - -def _render_computed( - computed: Computed, autogen_context: AutogenContext -) -> str: - text = _render_potential_expr( - computed.sqltext, autogen_context, wrap_in_text=False - ) - - kwargs = {} - if computed.persisted is not None: - kwargs["persisted"] = computed.persisted - return "%(prefix)sComputed(%(text)s, %(kwargs)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "text": text, - "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())), - 
} - - -def _render_identity( - identity: Identity, autogen_context: AutogenContext -) -> str: - kwargs = sqla_compat._get_identity_options_dict( - identity, dialect_kwargs=True - ) - - return "%(prefix)sIdentity(%(kwargs)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "kwargs": (", ".join("%s=%s" % pair for pair in kwargs.items())), - } - - -def _repr_type( - type_: TypeEngine, - autogen_context: AutogenContext, - _skip_variants: bool = False, -) -> str: - rendered = _user_defined_render("type", type_, autogen_context) - if rendered is not False: - return rendered - - if hasattr(autogen_context.migration_context, "impl"): - impl_rt = autogen_context.migration_context.impl.render_type( - type_, autogen_context - ) - else: - impl_rt = None - - mod = type(type_).__module__ - imports = autogen_context.imports - if mod.startswith("sqlalchemy.dialects"): - match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod) - assert match is not None - dname = match.group(1) - if imports is not None: - imports.add("from sqlalchemy.dialects import %s" % dname) - if impl_rt: - return impl_rt - else: - return "%s.%r" % (dname, type_) - elif impl_rt: - return impl_rt - elif not _skip_variants and sqla_compat._type_has_variants(type_): - return _render_Variant_type(type_, autogen_context) - elif mod.startswith("sqlalchemy."): - if "_render_%s_type" % type_.__visit_name__ in globals(): - fn = globals()["_render_%s_type" % type_.__visit_name__] - return fn(type_, autogen_context) - else: - prefix = _sqlalchemy_autogenerate_prefix(autogen_context) - return "%s%r" % (prefix, type_) - else: - prefix = _user_autogenerate_prefix(autogen_context, type_) - return "%s%r" % (prefix, type_) - - -def _render_ARRAY_type(type_: ARRAY, autogen_context: AutogenContext) -> str: - return cast( - str, - _render_type_w_subtype( - type_, autogen_context, "item_type", r"(.+?\()" - ), - ) - - -def _render_Variant_type( - type_: TypeEngine, autogen_context: AutogenContext -) -> str: - base_type, variant_mapping = sqla_compat._get_variant_mapping(type_) - base = _repr_type(base_type, autogen_context, _skip_variants=True) - assert base is not None and base is not False # type: ignore[comparison-overlap] # noqa:E501 - for dialect in sorted(variant_mapping): - typ = variant_mapping[dialect] - base += ".with_variant(%s, %r)" % ( - _repr_type(typ, autogen_context, _skip_variants=True), - dialect, - ) - return base - - -def _render_type_w_subtype( - type_: TypeEngine, - autogen_context: AutogenContext, - attrname: str, - regexp: str, - prefix: Optional[str] = None, -) -> Union[Optional[str], Literal[False]]: - outer_repr = repr(type_) - inner_type = getattr(type_, attrname, None) - if inner_type is None: - return False - - inner_repr = repr(inner_type) - - inner_repr = re.sub(r"([\(\)])", r"\\\1", inner_repr) - sub_type = _repr_type(getattr(type_, attrname), autogen_context) - outer_type = re.sub(regexp + inner_repr, r"\1%s" % sub_type, outer_repr) - - if prefix: - return "%s%s" % (prefix, outer_type) - - mod = type(type_).__module__ - if mod.startswith("sqlalchemy.dialects"): - match = re.match(r"sqlalchemy\.dialects\.(\w+)", mod) - assert match is not None - dname = match.group(1) - return "%s.%s" % (dname, outer_type) - elif mod.startswith("sqlalchemy"): - prefix = _sqlalchemy_autogenerate_prefix(autogen_context) - return "%s%s" % (prefix, outer_type) - else: - return None - - -_constraint_renderers = util.Dispatcher() - - -def _render_constraint( - constraint: Constraint, - autogen_context: AutogenContext, - 
namespace_metadata: Optional[MetaData], -) -> Optional[str]: - try: - renderer = _constraint_renderers.dispatch(constraint) - except ValueError: - util.warn("No renderer is established for object %r" % constraint) - return "[Unknown Python object %r]" % constraint - else: - return renderer(constraint, autogen_context, namespace_metadata) - - -@_constraint_renderers.dispatch_for(sa_schema.PrimaryKeyConstraint) -def _render_primary_key( - constraint: PrimaryKeyConstraint, - autogen_context: AutogenContext, - namespace_metadata: Optional[MetaData], -) -> Optional[str]: - rendered = _user_defined_render("primary_key", constraint, autogen_context) - if rendered is not False: - return rendered - - if not constraint.columns: - return None - - opts = [] - if constraint.name: - opts.append( - ("name", repr(_render_gen_name(autogen_context, constraint.name))) - ) - return "%(prefix)sPrimaryKeyConstraint(%(args)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "args": ", ".join( - [repr(c.name) for c in constraint.columns] - + ["%s=%s" % (kwname, val) for kwname, val in opts] - ), - } - - -def _fk_colspec( - fk: ForeignKey, - metadata_schema: Optional[str], - namespace_metadata: MetaData, -) -> str: - """Implement a 'safe' version of ForeignKey._get_colspec() that - won't fail if the remote table can't be resolved. - - """ - colspec = fk._get_colspec() - tokens = colspec.split(".") - tname, colname = tokens[-2:] - - if metadata_schema is not None and len(tokens) == 2: - table_fullname = "%s.%s" % (metadata_schema, tname) - else: - table_fullname = ".".join(tokens[0:-1]) - - if ( - not fk.link_to_name - and fk.parent is not None - and fk.parent.table is not None - ): - # try to resolve the remote table in order to adjust for column.key. - # the FK constraint needs to be rendered in terms of the column - # name. 
- - if table_fullname in namespace_metadata.tables: - col = namespace_metadata.tables[table_fullname].c.get(colname) - if col is not None: - colname = _ident(col.name) # type: ignore[assignment] - - colspec = "%s.%s" % (table_fullname, colname) - - return colspec - - -def _populate_render_fk_opts( - constraint: ForeignKeyConstraint, opts: List[Tuple[str, str]] -) -> None: - if constraint.onupdate: - opts.append(("onupdate", repr(constraint.onupdate))) - if constraint.ondelete: - opts.append(("ondelete", repr(constraint.ondelete))) - if constraint.initially: - opts.append(("initially", repr(constraint.initially))) - if constraint.deferrable: - opts.append(("deferrable", repr(constraint.deferrable))) - if constraint.use_alter: - opts.append(("use_alter", repr(constraint.use_alter))) - if constraint.match: - opts.append(("match", repr(constraint.match))) - - -@_constraint_renderers.dispatch_for(sa_schema.ForeignKeyConstraint) -def _render_foreign_key( - constraint: ForeignKeyConstraint, - autogen_context: AutogenContext, - namespace_metadata: MetaData, -) -> Optional[str]: - rendered = _user_defined_render("foreign_key", constraint, autogen_context) - if rendered is not False: - return rendered - - opts = [] - if constraint.name: - opts.append( - ("name", repr(_render_gen_name(autogen_context, constraint.name))) - ) - - _populate_render_fk_opts(constraint, opts) - - apply_metadata_schema = namespace_metadata.schema - return ( - "%(prefix)sForeignKeyConstraint([%(cols)s], " - "[%(refcols)s], %(args)s)" - % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "cols": ", ".join( - repr(_ident(f.parent.name)) for f in constraint.elements - ), - "refcols": ", ".join( - repr(_fk_colspec(f, apply_metadata_schema, namespace_metadata)) - for f in constraint.elements - ), - "args": ", ".join( - ["%s=%s" % (kwname, val) for kwname, val in opts] - ), - } - ) - - -@_constraint_renderers.dispatch_for(sa_schema.UniqueConstraint) -def _render_unique_constraint( - constraint: UniqueConstraint, - autogen_context: AutogenContext, - namespace_metadata: Optional[MetaData], -) -> str: - rendered = _user_defined_render("unique", constraint, autogen_context) - if rendered is not False: - return rendered - - return _uq_constraint(constraint, autogen_context, False) - - -@_constraint_renderers.dispatch_for(sa_schema.CheckConstraint) -def _render_check_constraint( - constraint: CheckConstraint, - autogen_context: AutogenContext, - namespace_metadata: Optional[MetaData], -) -> Optional[str]: - rendered = _user_defined_render("check", constraint, autogen_context) - if rendered is not False: - return rendered - - # detect the constraint being part of - # a parent type which is probably in the Table already. - # ideally SQLAlchemy would give us more of a first class - # way to detect this. 
- if ( - constraint._create_rule - and hasattr(constraint._create_rule, "target") - and isinstance( - constraint._create_rule.target, - sqltypes.TypeEngine, - ) - ): - return None - opts = [] - if constraint.name: - opts.append( - ("name", repr(_render_gen_name(autogen_context, constraint.name))) - ) - return "%(prefix)sCheckConstraint(%(sqltext)s%(opts)s)" % { - "prefix": _sqlalchemy_autogenerate_prefix(autogen_context), - "opts": ( - ", " + (", ".join("%s=%s" % (k, v) for k, v in opts)) - if opts - else "" - ), - "sqltext": _render_potential_expr( - constraint.sqltext, autogen_context, wrap_in_text=False - ), - } - - -@renderers.dispatch_for(ops.ExecuteSQLOp) -def _execute_sql(autogen_context: AutogenContext, op: ops.ExecuteSQLOp) -> str: - if not isinstance(op.sqltext, str): - raise NotImplementedError( - "Autogenerate rendering of SQL Expression language constructs " - "not supported here; please use a plain SQL string" - ) - return "op.execute(%r)" % op.sqltext - - -renderers = default_renderers.branch() diff --git a/myenv/Lib/site-packages/alembic/autogenerate/rewriter.py b/myenv/Lib/site-packages/alembic/autogenerate/rewriter.py deleted file mode 100644 index 8994dcf..0000000 --- a/myenv/Lib/site-packages/alembic/autogenerate/rewriter.py +++ /dev/null @@ -1,240 +0,0 @@ -from __future__ import annotations - -from typing import Any -from typing import Callable -from typing import Iterator -from typing import List -from typing import Tuple -from typing import Type -from typing import TYPE_CHECKING -from typing import Union - -from .. import util -from ..operations import ops - -if TYPE_CHECKING: - from ..operations.ops import AddColumnOp - from ..operations.ops import AlterColumnOp - from ..operations.ops import CreateTableOp - from ..operations.ops import DowngradeOps - from ..operations.ops import MigrateOperation - from ..operations.ops import MigrationScript - from ..operations.ops import ModifyTableOps - from ..operations.ops import OpContainer - from ..operations.ops import UpgradeOps - from ..runtime.migration import MigrationContext - from ..script.revision import _GetRevArg - -ProcessRevisionDirectiveFn = Callable[ - ["MigrationContext", "_GetRevArg", List["MigrationScript"]], None -] - - -class Rewriter: - """A helper object that allows easy 'rewriting' of ops streams. - - The :class:`.Rewriter` object is intended to be passed along - to the - :paramref:`.EnvironmentContext.configure.process_revision_directives` - parameter in an ``env.py`` script. Once constructed, any number - of "rewrites" functions can be associated with it, which will be given - the opportunity to modify the structure without having to have explicit - knowledge of the overall structure. - - The function is passed the :class:`.MigrationContext` object and - ``revision`` tuple that are passed to the :paramref:`.Environment - Context.configure.process_revision_directives` function normally, - and the third argument is an individual directive of the type - noted in the decorator. The function has the choice of returning - a single op directive, which normally can be the directive that - was actually passed, or a new directive to replace it, or a list - of zero or more directives to replace it. - - .. seealso:: - - :ref:`autogen_rewriter` - usage example - - """ - - _traverse = util.Dispatcher() - - _chained: Tuple[Union[ProcessRevisionDirectiveFn, Rewriter], ...] 
= () - - def __init__(self) -> None: - self.dispatch = util.Dispatcher() - - def chain( - self, - other: Union[ - ProcessRevisionDirectiveFn, - Rewriter, - ], - ) -> Rewriter: - """Produce a "chain" of this :class:`.Rewriter` to another. - - This allows two or more rewriters to operate serially on a stream, - e.g.:: - - writer1 = autogenerate.Rewriter() - writer2 = autogenerate.Rewriter() - - - @writer1.rewrites(ops.AddColumnOp) - def add_column_nullable(context, revision, op): - op.column.nullable = True - return op - - - @writer2.rewrites(ops.AddColumnOp) - def add_column_idx(context, revision, op): - idx_op = ops.CreateIndexOp( - "ixc", op.table_name, [op.column.name] - ) - return [op, idx_op] - - writer = writer1.chain(writer2) - - :param other: a :class:`.Rewriter` instance - :return: a new :class:`.Rewriter` that will run the operations - of this writer, then the "other" writer, in succession. - - """ - wr = self.__class__.__new__(self.__class__) - wr.__dict__.update(self.__dict__) - wr._chained += (other,) - return wr - - def rewrites( - self, - operator: Union[ - Type[AddColumnOp], - Type[MigrateOperation], - Type[AlterColumnOp], - Type[CreateTableOp], - Type[ModifyTableOps], - ], - ) -> Callable[..., Any]: - """Register a function as rewriter for a given type. - - The function should receive three arguments, which are - the :class:`.MigrationContext`, a ``revision`` tuple, and - an op directive of the type indicated. E.g.:: - - @writer1.rewrites(ops.AddColumnOp) - def add_column_nullable(context, revision, op): - op.column.nullable = True - return op - - """ - return self.dispatch.dispatch_for(operator) - - def _rewrite( - self, - context: MigrationContext, - revision: _GetRevArg, - directive: MigrateOperation, - ) -> Iterator[MigrateOperation]: - try: - _rewriter = self.dispatch.dispatch(directive) - except ValueError: - _rewriter = None - yield directive - else: - if self in directive._mutations: - yield directive - else: - for r_directive in util.to_list( - _rewriter(context, revision, directive), [] - ): - r_directive._mutations = r_directive._mutations.union( - [self] - ) - yield r_directive - - def __call__( - self, - context: MigrationContext, - revision: _GetRevArg, - directives: List[MigrationScript], - ) -> None: - self.process_revision_directives(context, revision, directives) - for process_revision_directives in self._chained: - process_revision_directives(context, revision, directives) - - @_traverse.dispatch_for(ops.MigrationScript) - def _traverse_script( - self, - context: MigrationContext, - revision: _GetRevArg, - directive: MigrationScript, - ) -> None: - upgrade_ops_list: List[UpgradeOps] = [] - for upgrade_ops in directive.upgrade_ops_list: - ret = self._traverse_for(context, revision, upgrade_ops) - if len(ret) != 1: - raise ValueError( - "Can only return single object for UpgradeOps traverse" - ) - upgrade_ops_list.append(ret[0]) - - directive.upgrade_ops = upgrade_ops_list # type: ignore - - downgrade_ops_list: List[DowngradeOps] = [] - for downgrade_ops in directive.downgrade_ops_list: - ret = self._traverse_for(context, revision, downgrade_ops) - if len(ret) != 1: - raise ValueError( - "Can only return single object for DowngradeOps traverse" - ) - downgrade_ops_list.append(ret[0]) - directive.downgrade_ops = downgrade_ops_list # type: ignore - - @_traverse.dispatch_for(ops.OpContainer) - def _traverse_op_container( - self, - context: MigrationContext, - revision: _GetRevArg, - directive: OpContainer, - ) -> None: - self._traverse_list(context, revision, 
directive.ops) - - @_traverse.dispatch_for(ops.MigrateOperation) - def _traverse_any_directive( - self, - context: MigrationContext, - revision: _GetRevArg, - directive: MigrateOperation, - ) -> None: - pass - - def _traverse_for( - self, - context: MigrationContext, - revision: _GetRevArg, - directive: MigrateOperation, - ) -> Any: - directives = list(self._rewrite(context, revision, directive)) - for directive in directives: - traverser = self._traverse.dispatch(directive) - traverser(self, context, revision, directive) - return directives - - def _traverse_list( - self, - context: MigrationContext, - revision: _GetRevArg, - directives: Any, - ) -> None: - dest = [] - for directive in directives: - dest.extend(self._traverse_for(context, revision, directive)) - - directives[:] = dest - - def process_revision_directives( - self, - context: MigrationContext, - revision: _GetRevArg, - directives: List[MigrationScript], - ) -> None: - self._traverse_list(context, revision, directives) diff --git a/myenv/Lib/site-packages/alembic/command.py b/myenv/Lib/site-packages/alembic/command.py deleted file mode 100644 index 89c1235..0000000 --- a/myenv/Lib/site-packages/alembic/command.py +++ /dev/null @@ -1,758 +0,0 @@ -# mypy: allow-untyped-defs, allow-untyped-calls - -from __future__ import annotations - -import os -from typing import List -from typing import Optional -from typing import TYPE_CHECKING -from typing import Union - -from . import autogenerate as autogen -from . import util -from .runtime.environment import EnvironmentContext -from .script import ScriptDirectory - -if TYPE_CHECKING: - from alembic.config import Config - from alembic.script.base import Script - from alembic.script.revision import _RevIdType - from .runtime.environment import ProcessRevisionDirectiveFn - - -def list_templates(config: Config) -> None: - """List available templates. - - :param config: a :class:`.Config` object. - - """ - - config.print_stdout("Available templates:\n") - for tempname in os.listdir(config.get_template_directory()): - with open( - os.path.join(config.get_template_directory(), tempname, "README") - ) as readme: - synopsis = next(readme).rstrip() - config.print_stdout("%s - %s", tempname, synopsis) - - config.print_stdout("\nTemplates are used via the 'init' command, e.g.:") - config.print_stdout("\n alembic init --template generic ./scripts") - - -def init( - config: Config, - directory: str, - template: str = "generic", - package: bool = False, -) -> None: - """Initialize a new scripts directory. - - :param config: a :class:`.Config` object. - - :param directory: string path of the target directory. - - :param template: string name of the migration environment template to - use. - - :param package: when True, write ``__init__.py`` files into the - environment location as well as the versions/ location. 
- - """ - - if os.access(directory, os.F_OK) and os.listdir(directory): - raise util.CommandError( - "Directory %s already exists and is not empty" % directory - ) - - template_dir = os.path.join(config.get_template_directory(), template) - if not os.access(template_dir, os.F_OK): - raise util.CommandError("No such template %r" % template) - - if not os.access(directory, os.F_OK): - with util.status( - f"Creating directory {os.path.abspath(directory)!r}", - **config.messaging_opts, - ): - os.makedirs(directory) - - versions = os.path.join(directory, "versions") - with util.status( - f"Creating directory {os.path.abspath(versions)!r}", - **config.messaging_opts, - ): - os.makedirs(versions) - - script = ScriptDirectory(directory) - - config_file: str | None = None - for file_ in os.listdir(template_dir): - file_path = os.path.join(template_dir, file_) - if file_ == "alembic.ini.mako": - assert config.config_file_name is not None - config_file = os.path.abspath(config.config_file_name) - if os.access(config_file, os.F_OK): - util.msg( - f"File {config_file!r} already exists, skipping", - **config.messaging_opts, - ) - else: - script._generate_template( - file_path, config_file, script_location=directory - ) - elif os.path.isfile(file_path): - output_file = os.path.join(directory, file_) - script._copy_file(file_path, output_file) - - if package: - for path in [ - os.path.join(os.path.abspath(directory), "__init__.py"), - os.path.join(os.path.abspath(versions), "__init__.py"), - ]: - with util.status(f"Adding {path!r}", **config.messaging_opts): - with open(path, "w"): - pass - - assert config_file is not None - util.msg( - "Please edit configuration/connection/logging " - f"settings in {config_file!r} before proceeding.", - **config.messaging_opts, - ) - - -def revision( - config: Config, - message: Optional[str] = None, - autogenerate: bool = False, - sql: bool = False, - head: str = "head", - splice: bool = False, - branch_label: Optional[_RevIdType] = None, - version_path: Optional[str] = None, - rev_id: Optional[str] = None, - depends_on: Optional[str] = None, - process_revision_directives: Optional[ProcessRevisionDirectiveFn] = None, -) -> Union[Optional[Script], List[Optional[Script]]]: - """Create a new revision file. - - :param config: a :class:`.Config` object. - - :param message: string message to apply to the revision; this is the - ``-m`` option to ``alembic revision``. - - :param autogenerate: whether or not to autogenerate the script from - the database; this is the ``--autogenerate`` option to - ``alembic revision``. - - :param sql: whether to dump the script out as a SQL string; when specified, - the script is dumped to stdout. This is the ``--sql`` option to - ``alembic revision``. - - :param head: head revision to build the new revision upon as a parent; - this is the ``--head`` option to ``alembic revision``. - - :param splice: whether or not the new revision should be made into a - new head of its own; is required when the given ``head`` is not itself - a head. This is the ``--splice`` option to ``alembic revision``. - - :param branch_label: string label to apply to the branch; this is the - ``--branch-label`` option to ``alembic revision``. - - :param version_path: string symbol identifying a specific version path - from the configuration; this is the ``--version-path`` option to - ``alembic revision``. - - :param rev_id: optional revision identifier to use instead of having - one generated; this is the ``--rev-id`` option to ``alembic revision``. 
- - :param depends_on: optional list of "depends on" identifiers; this is the - ``--depends-on`` option to ``alembic revision``. - - :param process_revision_directives: this is a callable that takes the - same form as the callable described at - :paramref:`.EnvironmentContext.configure.process_revision_directives`; - will be applied to the structure generated by the revision process - where it can be altered programmatically. Note that unlike all - the other parameters, this option is only available via programmatic - use of :func:`.command.revision`. - - """ - - script_directory = ScriptDirectory.from_config(config) - - command_args = dict( - message=message, - autogenerate=autogenerate, - sql=sql, - head=head, - splice=splice, - branch_label=branch_label, - version_path=version_path, - rev_id=rev_id, - depends_on=depends_on, - ) - revision_context = autogen.RevisionContext( - config, - script_directory, - command_args, - process_revision_directives=process_revision_directives, - ) - - environment = util.asbool(config.get_main_option("revision_environment")) - - if autogenerate: - environment = True - - if sql: - raise util.CommandError( - "Using --sql with --autogenerate does not make any sense" - ) - - def retrieve_migrations(rev, context): - revision_context.run_autogenerate(rev, context) - return [] - - elif environment: - - def retrieve_migrations(rev, context): - revision_context.run_no_autogenerate(rev, context) - return [] - - elif sql: - raise util.CommandError( - "Using --sql with the revision command when " - "revision_environment is not configured does not make any sense" - ) - - if environment: - with EnvironmentContext( - config, - script_directory, - fn=retrieve_migrations, - as_sql=sql, - template_args=revision_context.template_args, - revision_context=revision_context, - ): - script_directory.run_env() - - # the revision_context now has MigrationScript structure(s) present. - # these could theoretically be further processed / rewritten *here*, - # in addition to the hooks present within each run_migrations() call, - # or at the end of env.py run_migrations_online(). - - scripts = [script for script in revision_context.generate_scripts()] - if len(scripts) == 1: - return scripts[0] - else: - return scripts - - -def check(config: "Config") -> None: - """Check if revision command with autogenerate has pending upgrade ops. - - :param config: a :class:`.Config` object. - - .. versionadded:: 1.9.0 - - """ - - script_directory = ScriptDirectory.from_config(config) - - command_args = dict( - message=None, - autogenerate=True, - sql=False, - head="head", - splice=False, - branch_label=None, - version_path=None, - rev_id=None, - depends_on=None, - ) - revision_context = autogen.RevisionContext( - config, - script_directory, - command_args, - ) - - def retrieve_migrations(rev, context): - revision_context.run_autogenerate(rev, context) - return [] - - with EnvironmentContext( - config, - script_directory, - fn=retrieve_migrations, - as_sql=False, - template_args=revision_context.template_args, - revision_context=revision_context, - ): - script_directory.run_env() - - # the revision_context now has MigrationScript structure(s) present. 
- - migration_script = revision_context.generated_revisions[-1] - diffs = [] - for upgrade_ops in migration_script.upgrade_ops_list: - diffs.extend(upgrade_ops.as_diffs()) - - if diffs: - raise util.AutogenerateDiffsDetected( - f"New upgrade operations detected: {diffs}" - ) - else: - config.print_stdout("No new upgrade operations detected.") - - -def merge( - config: Config, - revisions: _RevIdType, - message: Optional[str] = None, - branch_label: Optional[_RevIdType] = None, - rev_id: Optional[str] = None, -) -> Optional[Script]: - """Merge two revisions together. Creates a new migration file. - - :param config: a :class:`.Config` instance - - :param revisions: The revisions to merge. - - :param message: string message to apply to the revision. - - :param branch_label: string label name to apply to the new revision. - - :param rev_id: hardcoded revision identifier instead of generating a new - one. - - .. seealso:: - - :ref:`branches` - - """ - - script = ScriptDirectory.from_config(config) - template_args = { - "config": config # Let templates use config for - # e.g. multiple databases - } - - environment = util.asbool(config.get_main_option("revision_environment")) - - if environment: - - def nothing(rev, context): - return [] - - with EnvironmentContext( - config, - script, - fn=nothing, - as_sql=False, - template_args=template_args, - ): - script.run_env() - - return script.generate_revision( - rev_id or util.rev_id(), - message, - refresh=True, - head=revisions, - branch_labels=branch_label, - **template_args, # type:ignore[arg-type] - ) - - -def upgrade( - config: Config, - revision: str, - sql: bool = False, - tag: Optional[str] = None, -) -> None: - """Upgrade to a later version. - - :param config: a :class:`.Config` instance. - - :param revision: string revision target or range for --sql mode. May be - ``"heads"`` to target the most recent revision(s). - - :param sql: if True, use ``--sql`` mode. - - :param tag: an arbitrary "tag" that can be intercepted by custom - ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument` - method. - - """ - - script = ScriptDirectory.from_config(config) - - starting_rev = None - if ":" in revision: - if not sql: - raise util.CommandError("Range revision not allowed") - starting_rev, revision = revision.split(":", 2) - - def upgrade(rev, context): - return script._upgrade_revs(revision, rev) - - with EnvironmentContext( - config, - script, - fn=upgrade, - as_sql=sql, - starting_rev=starting_rev, - destination_rev=revision, - tag=tag, - ): - script.run_env() - - -def downgrade( - config: Config, - revision: str, - sql: bool = False, - tag: Optional[str] = None, -) -> None: - """Revert to a previous version. - - :param config: a :class:`.Config` instance. - - :param revision: string revision target or range for --sql mode. May - be ``"base"`` to target the first revision. - - :param sql: if True, use ``--sql`` mode. - - :param tag: an arbitrary "tag" that can be intercepted by custom - ``env.py`` scripts via the :meth:`.EnvironmentContext.get_tag_argument` - method. 
- - """ - - script = ScriptDirectory.from_config(config) - starting_rev = None - if ":" in revision: - if not sql: - raise util.CommandError("Range revision not allowed") - starting_rev, revision = revision.split(":", 2) - elif sql: - raise util.CommandError( - "downgrade with --sql requires :" - ) - - def downgrade(rev, context): - return script._downgrade_revs(revision, rev) - - with EnvironmentContext( - config, - script, - fn=downgrade, - as_sql=sql, - starting_rev=starting_rev, - destination_rev=revision, - tag=tag, - ): - script.run_env() - - -def show(config: Config, rev: str) -> None: - """Show the revision(s) denoted by the given symbol. - - :param config: a :class:`.Config` instance. - - :param rev: string revision target. May be ``"current"`` to show the - revision(s) currently applied in the database. - - """ - - script = ScriptDirectory.from_config(config) - - if rev == "current": - - def show_current(rev, context): - for sc in script.get_revisions(rev): - config.print_stdout(sc.log_entry) - return [] - - with EnvironmentContext(config, script, fn=show_current): - script.run_env() - else: - for sc in script.get_revisions(rev): - config.print_stdout(sc.log_entry) - - -def history( - config: Config, - rev_range: Optional[str] = None, - verbose: bool = False, - indicate_current: bool = False, -) -> None: - """List changeset scripts in chronological order. - - :param config: a :class:`.Config` instance. - - :param rev_range: string revision range. - - :param verbose: output in verbose mode. - - :param indicate_current: indicate current revision. - - """ - base: Optional[str] - head: Optional[str] - script = ScriptDirectory.from_config(config) - if rev_range is not None: - if ":" not in rev_range: - raise util.CommandError( - "History range requires [start]:[end], " "[start]:, or :[end]" - ) - base, head = rev_range.strip().split(":") - else: - base = head = None - - environment = ( - util.asbool(config.get_main_option("revision_environment")) - or indicate_current - ) - - def _display_history(config, script, base, head, currents=()): - for sc in script.walk_revisions( - base=base or "base", head=head or "heads" - ): - if indicate_current: - sc._db_current_indicator = sc.revision in currents - - config.print_stdout( - sc.cmd_format( - verbose=verbose, - include_branches=True, - include_doc=True, - include_parents=True, - ) - ) - - def _display_history_w_current(config, script, base, head): - def _display_current_history(rev, context): - if head == "current": - _display_history(config, script, base, rev, rev) - elif base == "current": - _display_history(config, script, rev, head, rev) - else: - _display_history(config, script, base, head, rev) - return [] - - with EnvironmentContext(config, script, fn=_display_current_history): - script.run_env() - - if base == "current" or head == "current" or environment: - _display_history_w_current(config, script, base, head) - else: - _display_history(config, script, base, head) - - -def heads( - config: Config, verbose: bool = False, resolve_dependencies: bool = False -) -> None: - """Show current available heads in the script directory. - - :param config: a :class:`.Config` instance. - - :param verbose: output in verbose mode. - - :param resolve_dependencies: treat dependency version as down revisions. 
- - """ - - script = ScriptDirectory.from_config(config) - if resolve_dependencies: - heads = script.get_revisions("heads") - else: - heads = script.get_revisions(script.get_heads()) - - for rev in heads: - config.print_stdout( - rev.cmd_format( - verbose, include_branches=True, tree_indicators=False - ) - ) - - -def branches(config: Config, verbose: bool = False) -> None: - """Show current branch points. - - :param config: a :class:`.Config` instance. - - :param verbose: output in verbose mode. - - """ - script = ScriptDirectory.from_config(config) - for sc in script.walk_revisions(): - if sc.is_branch_point: - config.print_stdout( - "%s\n%s\n", - sc.cmd_format(verbose, include_branches=True), - "\n".join( - "%s -> %s" - % ( - " " * len(str(sc.revision)), - rev_obj.cmd_format( - False, include_branches=True, include_doc=verbose - ), - ) - for rev_obj in ( - script.get_revision(rev) for rev in sc.nextrev - ) - ), - ) - - -def current(config: Config, verbose: bool = False) -> None: - """Display the current revision for a database. - - :param config: a :class:`.Config` instance. - - :param verbose: output in verbose mode. - - """ - - script = ScriptDirectory.from_config(config) - - def display_version(rev, context): - if verbose: - config.print_stdout( - "Current revision(s) for %s:", - util.obfuscate_url_pw(context.connection.engine.url), - ) - for rev in script.get_all_current(rev): - config.print_stdout(rev.cmd_format(verbose)) - - return [] - - with EnvironmentContext( - config, script, fn=display_version, dont_mutate=True - ): - script.run_env() - - -def stamp( - config: Config, - revision: _RevIdType, - sql: bool = False, - tag: Optional[str] = None, - purge: bool = False, -) -> None: - """'stamp' the revision table with the given revision; don't - run any migrations. - - :param config: a :class:`.Config` instance. - - :param revision: target revision or list of revisions. May be a list - to indicate stamping of multiple branch heads; may be ``"base"`` - to remove all revisions from the table or ``"heads"`` to stamp the - most recent revision(s). - - .. note:: this parameter is called "revisions" in the command line - interface. - - :param sql: use ``--sql`` mode - - :param tag: an arbitrary "tag" that can be intercepted by custom - ``env.py`` scripts via the :class:`.EnvironmentContext.get_tag_argument` - method. - - :param purge: delete all entries in the version table before stamping. - - """ - - script = ScriptDirectory.from_config(config) - - if sql: - destination_revs = [] - starting_rev = None - for _revision in util.to_list(revision): - if ":" in _revision: - srev, _revision = _revision.split(":", 2) - - if starting_rev != srev: - if starting_rev is None: - starting_rev = srev - else: - raise util.CommandError( - "Stamp operation with --sql only supports a " - "single starting revision at a time" - ) - destination_revs.append(_revision) - else: - destination_revs = util.to_list(revision) - - def do_stamp(rev, context): - return script._stamp_revs(util.to_tuple(destination_revs), rev) - - with EnvironmentContext( - config, - script, - fn=do_stamp, - as_sql=sql, - starting_rev=starting_rev if sql else None, - destination_rev=util.to_tuple(destination_revs), - tag=tag, - purge=purge, - ): - script.run_env() - - -def edit(config: Config, rev: str) -> None: - """Edit revision script(s) using $EDITOR. - - :param config: a :class:`.Config` instance. - - :param rev: target revision. 
- - """ - - script = ScriptDirectory.from_config(config) - - if rev == "current": - - def edit_current(rev, context): - if not rev: - raise util.CommandError("No current revisions") - for sc in script.get_revisions(rev): - util.open_in_editor(sc.path) - return [] - - with EnvironmentContext(config, script, fn=edit_current): - script.run_env() - else: - revs = script.get_revisions(rev) - if not revs: - raise util.CommandError( - "No revision files indicated by symbol '%s'" % rev - ) - for sc in revs: - assert sc - util.open_in_editor(sc.path) - - -def ensure_version(config: Config, sql: bool = False) -> None: - """Create the alembic version table if it doesn't exist already . - - :param config: a :class:`.Config` instance. - - :param sql: use ``--sql`` mode. - - .. versionadded:: 1.7.6 - - """ - - script = ScriptDirectory.from_config(config) - - def do_ensure_version(rev, context): - context._ensure_version_table() - return [] - - with EnvironmentContext( - config, - script, - fn=do_ensure_version, - as_sql=sql, - ): - script.run_env() diff --git a/myenv/Lib/site-packages/alembic/config.py b/myenv/Lib/site-packages/alembic/config.py deleted file mode 100644 index 2c52e7c..0000000 --- a/myenv/Lib/site-packages/alembic/config.py +++ /dev/null @@ -1,640 +0,0 @@ -from __future__ import annotations - -from argparse import ArgumentParser -from argparse import Namespace -from configparser import ConfigParser -import inspect -import os -import sys -from typing import Any -from typing import cast -from typing import Dict -from typing import Mapping -from typing import Optional -from typing import overload -from typing import Sequence -from typing import TextIO -from typing import Union - -from typing_extensions import TypedDict - -from . import __version__ -from . import command -from . import util -from .util import compat - - -class Config: - r"""Represent an Alembic configuration. - - Within an ``env.py`` script, this is available - via the :attr:`.EnvironmentContext.config` attribute, - which in turn is available at ``alembic.context``:: - - from alembic import context - - some_param = context.config.get_main_option("my option") - - When invoking Alembic programmatically, a new - :class:`.Config` can be created by passing - the name of an .ini file to the constructor:: - - from alembic.config import Config - alembic_cfg = Config("/path/to/yourapp/alembic.ini") - - With a :class:`.Config` object, you can then - run Alembic commands programmatically using the directives - in :mod:`alembic.command`. - - The :class:`.Config` object can also be constructed without - a filename. Values can be set programmatically, and - new sections will be created as needed:: - - from alembic.config import Config - alembic_cfg = Config() - alembic_cfg.set_main_option("script_location", "myapp:migrations") - alembic_cfg.set_main_option("sqlalchemy.url", "postgresql://foo/bar") - alembic_cfg.set_section_option("mysection", "foo", "bar") - - .. warning:: - - When using programmatic configuration, make sure the - ``env.py`` file in use is compatible with the target configuration; - including that the call to Python ``logging.fileConfig()`` is - omitted if the programmatic configuration doesn't actually include - logging directives. 
- - For passing non-string values to environments, such as connections and - engines, use the :attr:`.Config.attributes` dictionary:: - - with engine.begin() as connection: - alembic_cfg.attributes['connection'] = connection - command.upgrade(alembic_cfg, "head") - - :param file\_: name of the .ini file to open. - :param ini_section: name of the main Alembic section within the - .ini file - :param output_buffer: optional file-like buffer which - will be passed to the :class:`.MigrationContext` - used to redirect - the output of "offline generation" when using Alembic programmatically. - :param stdout: buffer where the "print" output of commands will be sent. - Defaults to ``sys.stdout``. - - :param config_args: A dictionary of keys and values that will be used - for substitution in the alembic config file. The dictionary as given - is **copied** to a new one, stored locally as the attribute - ``.config_args``. When the :attr:`.Config.file_config` attribute is - first invoked, the replacement variable ``here`` will be added to this - dictionary before the dictionary is passed to ``ConfigParser()`` - to parse the .ini file. - - :param attributes: optional dictionary of arbitrary Python keys/values, - which will be populated into the :attr:`.Config.attributes` dictionary. - - .. seealso:: - - :ref:`connection_sharing` - - """ - - def __init__( - self, - file_: Union[str, os.PathLike[str], None] = None, - ini_section: str = "alembic", - output_buffer: Optional[TextIO] = None, - stdout: TextIO = sys.stdout, - cmd_opts: Optional[Namespace] = None, - config_args: Mapping[str, Any] = util.immutabledict(), - attributes: Optional[Dict[str, Any]] = None, - ) -> None: - """Construct a new :class:`.Config`""" - self.config_file_name = file_ - self.config_ini_section = ini_section - self.output_buffer = output_buffer - self.stdout = stdout - self.cmd_opts = cmd_opts - self.config_args = dict(config_args) - if attributes: - self.attributes.update(attributes) - - cmd_opts: Optional[Namespace] = None - """The command-line options passed to the ``alembic`` script. - - Within an ``env.py`` script this can be accessed via the - :attr:`.EnvironmentContext.config` attribute. - - .. seealso:: - - :meth:`.EnvironmentContext.get_x_argument` - - """ - - config_file_name: Union[str, os.PathLike[str], None] = None - """Filesystem path to the .ini file in use.""" - - config_ini_section: str = None # type:ignore[assignment] - """Name of the config file section to read basic configuration - from. Defaults to ``alembic``, that is the ``[alembic]`` section - of the .ini file. This value is modified using the ``-n/--name`` - option to the Alembic runner. - - """ - - @util.memoized_property - def attributes(self) -> Dict[str, Any]: - """A Python dictionary for storage of additional state. - - This is a utility dictionary which can include not just strings but - engines, connections, schema objects, or anything else. - Use this to pass objects into an env.py script, such as passing - a :class:`sqlalchemy.engine.base.Connection` when calling - commands from :mod:`alembic.command` programmatically. - - .. seealso:: - - :ref:`connection_sharing` - - :paramref:`.Config.attributes` - - """ - return {} - - def print_stdout(self, text: str, *arg: Any) -> None: - """Render a message to standard out. - - When :meth:`.Config.print_stdout` is called with additional args - those arguments will be formatted against the provided text, - otherwise we simply output the provided text verbatim.
- - This is a no-op when the ``quiet`` messaging option is enabled. - - e.g.:: - - >>> config.print_stdout('Some text %s', 'arg') - Some text arg - - """ - - if arg: - output = str(text) % arg - else: - output = str(text) - - util.write_outstream(self.stdout, output, "\n", **self.messaging_opts) - - @util.memoized_property - def file_config(self) -> ConfigParser: - """Return the underlying ``ConfigParser`` object. - - Direct access to the .ini file is available here, - though the :meth:`.Config.get_section` and - :meth:`.Config.get_main_option` - methods provide a possibly simpler interface. - - """ - - if self.config_file_name: - here = os.path.abspath(os.path.dirname(self.config_file_name)) - else: - here = "" - self.config_args["here"] = here - file_config = ConfigParser(self.config_args) - if self.config_file_name: - compat.read_config_parser(file_config, [self.config_file_name]) - else: - file_config.add_section(self.config_ini_section) - return file_config - - def get_template_directory(self) -> str: - """Return the directory where Alembic setup templates are found. - - This method is used by the alembic ``init`` and ``list_templates`` - commands. - - """ - import alembic - - package_dir = os.path.abspath(os.path.dirname(alembic.__file__)) - return os.path.join(package_dir, "templates") - - @overload - def get_section( - self, name: str, default: None = ... - ) -> Optional[Dict[str, str]]: ... - - # "default" here could also be a TypeVar - # _MT = TypeVar("_MT", bound=Mapping[str, str]), - # however mypy wasn't handling that correctly (pyright was) - @overload - def get_section( - self, name: str, default: Dict[str, str] - ) -> Dict[str, str]: ... - - @overload - def get_section( - self, name: str, default: Mapping[str, str] - ) -> Union[Dict[str, str], Mapping[str, str]]: ... - - def get_section( - self, name: str, default: Optional[Mapping[str, str]] = None - ) -> Optional[Mapping[str, str]]: - """Return all the configuration options from a given .ini file section - as a dictionary. - - If the given section does not exist, the value of ``default`` - is returned, which is expected to be a dictionary or other mapping. - - """ - if not self.file_config.has_section(name): - return default - - return dict(self.file_config.items(name)) - - def set_main_option(self, name: str, value: str) -> None: - """Set an option programmatically within the 'main' section. - - This overrides whatever was in the .ini file. - - :param name: name of the value - - :param value: the value. Note that this value is passed to - ``ConfigParser.set``, which supports variable interpolation using - pyformat (e.g. ``%(some_value)s``). A raw percent sign not part of - an interpolation symbol must therefore be escaped, e.g. ``%%``. - The given value may refer to another value already in the file - using the interpolation format. - - """ - self.set_section_option(self.config_ini_section, name, value) - - def remove_main_option(self, name: str) -> None: - self.file_config.remove_option(self.config_ini_section, name) - - def set_section_option(self, section: str, name: str, value: str) -> None: - """Set an option programmatically within the given section. - - The section is created if it doesn't already exist. - The value here will override whatever was in the .ini - file. - - :param section: name of the section - - :param name: name of the value - - :param value: the value. Note that this value is passed to - ``ConfigParser.set``, which supports variable interpolation using - pyformat (e.g. ``%(some_value)s``).
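Taken together, the accessors above allow an application to drive Alembic without any .ini file at all. A minimal sketch (the option values are hypothetical; note the ``%%`` escaping required by ``ConfigParser`` interpolation, as the docstring above explains)::

    from alembic.config import Config

    cfg = Config()  # no .ini file; options are set entirely in code
    cfg.set_main_option("script_location", "myapp:migrations")
    # a literal percent sign must be doubled for ConfigParser interpolation
    cfg.set_main_option("sqlalchemy.url", "postgresql://scott:tiger%%40host/app")
    url = cfg.get_main_option("sqlalchemy.url")  # interpolation restores a single "%"
    hooks = cfg.get_section("post_write_hooks", {})  # {} when the section is absent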
A raw percent sign not part of - an interpolation symbol must therefore be escaped, e.g. ``%%``. - The given value may refer to another value already in the file - using the interpolation format. - - """ - - if not self.file_config.has_section(section): - self.file_config.add_section(section) - self.file_config.set(section, name, value) - - def get_section_option( - self, section: str, name: str, default: Optional[str] = None - ) -> Optional[str]: - """Return an option from the given section of the .ini file.""" - if not self.file_config.has_section(section): - raise util.CommandError( - "No config file %r found, or file has no " - "'[%s]' section" % (self.config_file_name, section) - ) - if self.file_config.has_option(section, name): - return self.file_config.get(section, name) - else: - return default - - @overload - def get_main_option(self, name: str, default: str) -> str: ... - - @overload - def get_main_option( - self, name: str, default: Optional[str] = None - ) -> Optional[str]: ... - - def get_main_option( - self, name: str, default: Optional[str] = None - ) -> Optional[str]: - """Return an option from the 'main' section of the .ini file. - - This defaults to being a key from the ``[alembic]`` - section, unless the ``-n/--name`` flag were used to - indicate a different section. - - """ - return self.get_section_option(self.config_ini_section, name, default) - - @util.memoized_property - def messaging_opts(self) -> MessagingOptions: - """The messaging options.""" - return cast( - MessagingOptions, - util.immutabledict( - {"quiet": getattr(self.cmd_opts, "quiet", False)} - ), - ) - - -class MessagingOptions(TypedDict, total=False): - quiet: bool - - -class CommandLine: - def __init__(self, prog: Optional[str] = None) -> None: - self._generate_args(prog) - - def _generate_args(self, prog: Optional[str]) -> None: - def add_options( - fn: Any, parser: Any, positional: Any, kwargs: Any - ) -> None: - kwargs_opts = { - "template": ( - "-t", - "--template", - dict( - default="generic", - type=str, - help="Setup template for use with 'init'", - ), - ), - "message": ( - "-m", - "--message", - dict( - type=str, help="Message string to use with 'revision'" - ), - ), - "sql": ( - "--sql", - dict( - action="store_true", - help="Don't emit SQL to database - dump to " - "standard output/file instead. 
See docs on " - "offline mode.", - ), - ), - "tag": ( - "--tag", - dict( - type=str, - help="Arbitrary 'tag' name - can be used by " - "custom env.py scripts.", - ), - ), - "head": ( - "--head", - dict( - type=str, - help="Specify head revision or @head " - "to base new revision on.", - ), - ), - "splice": ( - "--splice", - dict( - action="store_true", - help="Allow a non-head revision as the " - "'head' to splice onto", - ), - ), - "depends_on": ( - "--depends-on", - dict( - action="append", - help="Specify one or more revision identifiers " - "which this revision should depend on.", - ), - ), - "rev_id": ( - "--rev-id", - dict( - type=str, - help="Specify a hardcoded revision id instead of " - "generating one", - ), - ), - "version_path": ( - "--version-path", - dict( - type=str, - help="Specify specific path from config for " - "version file", - ), - ), - "branch_label": ( - "--branch-label", - dict( - type=str, - help="Specify a branch label to apply to the " - "new revision", - ), - ), - "verbose": ( - "-v", - "--verbose", - dict(action="store_true", help="Use more verbose output"), - ), - "resolve_dependencies": ( - "--resolve-dependencies", - dict( - action="store_true", - help="Treat dependency versions as down revisions", - ), - ), - "autogenerate": ( - "--autogenerate", - dict( - action="store_true", - help="Populate revision script with candidate " - "migration operations, based on comparison " - "of database to model.", - ), - ), - "rev_range": ( - "-r", - "--rev-range", - dict( - action="store", - help="Specify a revision range; " - "format is [start]:[end]", - ), - ), - "indicate_current": ( - "-i", - "--indicate-current", - dict( - action="store_true", - help="Indicate the current revision", - ), - ), - "purge": ( - "--purge", - dict( - action="store_true", - help="Unconditionally erase the version table " - "before stamping", - ), - ), - "package": ( - "--package", - dict( - action="store_true", - help="Write empty __init__.py files to the " - "environment and version locations", - ), - ), - } - positional_help = { - "directory": "location of scripts directory", - "revision": "revision identifier", - "revisions": "one or more revisions, or 'heads' for all heads", - } - for arg in kwargs: - if arg in kwargs_opts: - args = kwargs_opts[arg] - args, kw = args[0:-1], args[-1] - parser.add_argument(*args, **kw) - - for arg in positional: - if ( - arg == "revisions" - or fn in positional_translations - and positional_translations[fn][arg] == "revisions" - ): - subparser.add_argument( - "revisions", - nargs="+", - help=positional_help.get("revisions"), - ) - else: - subparser.add_argument(arg, help=positional_help.get(arg)) - - parser = ArgumentParser(prog=prog) - - parser.add_argument( - "--version", action="version", version="%%(prog)s %s" % __version__ - ) - parser.add_argument( - "-c", - "--config", - type=str, - default=os.environ.get("ALEMBIC_CONFIG", "alembic.ini"), - help="Alternate config file; defaults to value of " - 'ALEMBIC_CONFIG environment variable, or "alembic.ini"', - ) - parser.add_argument( - "-n", - "--name", - type=str, - default="alembic", - help="Name of section in .ini file to " "use for Alembic config", - ) - parser.add_argument( - "-x", - action="append", - help="Additional arguments consumed by " - "custom env.py scripts, e.g. 
-x " - "setting1=somesetting -x setting2=somesetting", - ) - parser.add_argument( - "--raiseerr", - action="store_true", - help="Raise a full stack trace on error", - ) - parser.add_argument( - "-q", - "--quiet", - action="store_true", - help="Do not log to std output.", - ) - subparsers = parser.add_subparsers() - - positional_translations: Dict[Any, Any] = { - command.stamp: {"revision": "revisions"} - } - - for fn in [getattr(command, n) for n in dir(command)]: - if ( - inspect.isfunction(fn) - and fn.__name__[0] != "_" - and fn.__module__ == "alembic.command" - ): - spec = compat.inspect_getfullargspec(fn) - if spec[3] is not None: - positional = spec[0][1 : -len(spec[3])] - kwarg = spec[0][-len(spec[3]) :] - else: - positional = spec[0][1:] - kwarg = [] - - if fn in positional_translations: - positional = [ - positional_translations[fn].get(name, name) - for name in positional - ] - - # parse first line(s) of helptext without a line break - help_ = fn.__doc__ - if help_: - help_text = [] - for line in help_.split("\n"): - if not line.strip(): - break - else: - help_text.append(line.strip()) - else: - help_text = [] - subparser = subparsers.add_parser( - fn.__name__, help=" ".join(help_text) - ) - add_options(fn, subparser, positional, kwarg) - subparser.set_defaults(cmd=(fn, positional, kwarg)) - self.parser = parser - - def run_cmd(self, config: Config, options: Namespace) -> None: - fn, positional, kwarg = options.cmd - - try: - fn( - config, - *[getattr(options, k, None) for k in positional], - **{k: getattr(options, k, None) for k in kwarg}, - ) - except util.CommandError as e: - if options.raiseerr: - raise - else: - util.err(str(e), **config.messaging_opts) - - def main(self, argv: Optional[Sequence[str]] = None) -> None: - options = self.parser.parse_args(argv) - if not hasattr(options, "cmd"): - # see http://bugs.python.org/issue9253, argparse - # behavior changed incompatibly in py3.3 - self.parser.error("too few arguments") - else: - cfg = Config( - file_=options.config, - ini_section=options.name, - cmd_opts=options, - ) - self.run_cmd(cfg, options) - - -def main( - argv: Optional[Sequence[str]] = None, - prog: Optional[str] = None, - **kwargs: Any, -) -> None: - """The console runner function for Alembic.""" - - CommandLine(prog=prog).main(argv=argv) - - -if __name__ == "__main__": - main() diff --git a/myenv/Lib/site-packages/alembic/context.py b/myenv/Lib/site-packages/alembic/context.py deleted file mode 100644 index 758fca8..0000000 --- a/myenv/Lib/site-packages/alembic/context.py +++ /dev/null @@ -1,5 +0,0 @@ -from .runtime.environment import EnvironmentContext - -# create proxy functions for -# each method on the EnvironmentContext class. 
-EnvironmentContext.create_module_class_proxy(globals(), locals()) diff --git a/myenv/Lib/site-packages/alembic/context.pyi b/myenv/Lib/site-packages/alembic/context.pyi deleted file mode 100644 index 80619fb..0000000 --- a/myenv/Lib/site-packages/alembic/context.pyi +++ /dev/null @@ -1,853 +0,0 @@ -# ### this file stubs are generated by tools/write_pyi.py - do not edit ### -# ### imports are manually managed -from __future__ import annotations - -from typing import Any -from typing import Callable -from typing import Collection -from typing import ContextManager -from typing import Dict -from typing import Iterable -from typing import List -from typing import Literal -from typing import Mapping -from typing import MutableMapping -from typing import Optional -from typing import overload -from typing import Sequence -from typing import TextIO -from typing import Tuple -from typing import TYPE_CHECKING -from typing import Union - -if TYPE_CHECKING: - from sqlalchemy.engine.base import Connection - from sqlalchemy.engine.url import URL - from sqlalchemy.sql import Executable - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import FetchedValue - from sqlalchemy.sql.schema import MetaData - from sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.sql.type_api import TypeEngine - - from .autogenerate.api import AutogenContext - from .config import Config - from .operations.ops import MigrationScript - from .runtime.migration import _ProxyTransaction - from .runtime.migration import MigrationContext - from .runtime.migration import MigrationInfo - from .script import ScriptDirectory - -### end imports ### - -def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]: - """Return a context manager that will - enclose an operation within a "transaction", - as defined by the environment's offline - and transactional DDL settings. - - e.g.:: - - with context.begin_transaction(): - context.run_migrations() - - :meth:`.begin_transaction` is intended to - "do the right thing" regardless of - calling context: - - * If :meth:`.is_transactional_ddl` is ``False``, - returns a "do nothing" context manager - which otherwise produces no transactional - state or directives. - * If :meth:`.is_offline_mode` is ``True``, - returns a context manager that will - invoke the :meth:`.DefaultImpl.emit_begin` - and :meth:`.DefaultImpl.emit_commit` - methods, which will produce the string - directives ``BEGIN`` and ``COMMIT`` on - the output stream, as rendered by the - target backend (e.g. SQL Server would - emit ``BEGIN TRANSACTION``). - * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin` - on the current online connection, which - returns a :class:`sqlalchemy.engine.Transaction` - object. This object demarcates a real - transaction and is itself a context manager, - which will roll back if an exception - is raised. - - Note that a custom ``env.py`` script which - has more specific transactional needs can of course - manipulate the :class:`~sqlalchemy.engine.Connection` - directly to produce transactional state in "online" - mode. 
- - """ - -config: Config - -def configure( - connection: Optional[Connection] = None, - url: Union[str, URL, None] = None, - dialect_name: Optional[str] = None, - dialect_opts: Optional[Dict[str, Any]] = None, - transactional_ddl: Optional[bool] = None, - transaction_per_migration: bool = False, - output_buffer: Optional[TextIO] = None, - starting_rev: Optional[str] = None, - tag: Optional[str] = None, - template_args: Optional[Dict[str, Any]] = None, - render_as_batch: bool = False, - target_metadata: Union[MetaData, Sequence[MetaData], None] = None, - include_name: Optional[ - Callable[ - [ - Optional[str], - Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", - ], - MutableMapping[ - Literal[ - "schema_name", - "table_name", - "schema_qualified_table_name", - ], - Optional[str], - ], - ], - bool, - ] - ] = None, - include_object: Optional[ - Callable[ - [ - SchemaItem, - Optional[str], - Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", - ], - bool, - Optional[SchemaItem], - ], - bool, - ] - ] = None, - include_schemas: bool = False, - process_revision_directives: Optional[ - Callable[ - [ - MigrationContext, - Union[str, Iterable[Optional[str]], Iterable[str]], - List[MigrationScript], - ], - None, - ] - ] = None, - compare_type: Union[ - bool, - Callable[ - [ - MigrationContext, - Column[Any], - Column[Any], - TypeEngine[Any], - TypeEngine[Any], - ], - Optional[bool], - ], - ] = True, - compare_server_default: Union[ - bool, - Callable[ - [ - MigrationContext, - Column[Any], - Column[Any], - Optional[str], - Optional[FetchedValue], - Optional[str], - ], - Optional[bool], - ], - ] = False, - render_item: Optional[ - Callable[[str, Any, AutogenContext], Union[str, Literal[False]]] - ] = None, - literal_binds: bool = False, - upgrade_token: str = "upgrades", - downgrade_token: str = "downgrades", - alembic_module_prefix: str = "op.", - sqlalchemy_module_prefix: str = "sa.", - user_module_prefix: Optional[str] = None, - on_version_apply: Optional[ - Callable[ - [ - MigrationContext, - MigrationInfo, - Collection[Any], - Mapping[str, Any], - ], - None, - ] - ] = None, - **kw: Any, -) -> None: - """Configure a :class:`.MigrationContext` within this - :class:`.EnvironmentContext` which will provide database - connectivity and other configuration to a series of - migration scripts. - - Many methods on :class:`.EnvironmentContext` require that - this method has been called in order to function, as they - ultimately need to have database access or at least access - to the dialect in use. Those which do are documented as such. - - The important thing needed by :meth:`.configure` is a - means to determine what kind of database dialect is in use. - An actual connection to that database is needed only if - the :class:`.MigrationContext` is to be used in - "online" mode. - - If the :meth:`.is_offline_mode` function returns ``True``, - then no connection is needed here. Otherwise, the - ``connection`` parameter should be present as an - instance of :class:`sqlalchemy.engine.Connection`. - - This function is typically called from the ``env.py`` - script within a migration environment. It can be called - multiple times for an invocation. The most recent - :class:`~sqlalchemy.engine.Connection` - for which it was called is the one that will be operated upon - by the next call to :meth:`.run_migrations`. 
- - General parameters: - - :param connection: a :class:`~sqlalchemy.engine.Connection` - to use - for SQL execution in "online" mode. When present, is also - used to determine the type of dialect in use. - :param url: a string database url, or a - :class:`sqlalchemy.engine.url.URL` object. - The type of dialect to be used will be derived from this if - ``connection`` is not passed. - :param dialect_name: string name of a dialect, such as - "postgresql", "mssql", etc. - The type of dialect to be used will be derived from this if - ``connection`` and ``url`` are not passed. - :param dialect_opts: dictionary of options to be passed to dialect - constructor. - :param transactional_ddl: Force the usage of "transactional" - DDL on or off; - this otherwise defaults to whether or not the dialect in - use supports it. - :param transaction_per_migration: if True, nest each migration script - in a transaction rather than the full series of migrations to - run. - :param output_buffer: a file-like object that will be used - for textual output - when the ``--sql`` option is used to generate SQL scripts. - Defaults to - ``sys.stdout`` if not passed here and also not present on - the :class:`.Config` - object. The value here overrides that of the :class:`.Config` - object. - :param output_encoding: when using ``--sql`` to generate SQL - scripts, apply this encoding to the string output. - :param literal_binds: when using ``--sql`` to generate SQL - scripts, pass through the ``literal_binds`` flag to the compiler - so that any literal values that would ordinarily be bound - parameters are converted to plain strings. - - .. warning:: Dialects can typically only handle simple datatypes - like strings and numbers for auto-literal generation. Datatypes - like dates, intervals, and others may still require manual - formatting, typically using :meth:`.Operations.inline_literal`. - - .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy - versions prior to 0.8 where this feature is not supported. - - .. seealso:: - - :meth:`.Operations.inline_literal` - - :param starting_rev: Override the "starting revision" argument - when using ``--sql`` mode. - :param tag: a string tag for usage by custom ``env.py`` scripts. - Set via the ``--tag`` option, can be overridden here. - :param template_args: dictionary of template arguments which - will be added to the template argument environment when - running the "revision" command. Note that the script environment - is only run within the "revision" command if the --autogenerate - option is used, or if the option "revision_environment=true" - is present in the alembic.ini file. - - :param version_table: The name of the Alembic version table. - The default is ``'alembic_version'``. - :param version_table_schema: Optional schema to place version - table within. - :param version_table_pk: boolean, whether the Alembic version table - should use a primary key constraint for the "value" column; this - only takes effect when the table is first created. - Defaults to True; setting to False should not be necessary and is - here for backwards compatibility reasons. - :param on_version_apply: a callable or collection of callables to be - run for each migration step. - The callables will be run in the order they are given, once for - each migration step, after the respective operation has been - applied but before its transaction is finalized. 
- Each callable accepts no positional arguments and the following - keyword arguments: - - * ``ctx``: the :class:`.MigrationContext` running the migration, - * ``step``: a :class:`.MigrationInfo` representing the - step currently being applied, - * ``heads``: a collection of version strings representing the - current heads, - * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`. - - Parameters specific to the autogenerate feature, when - ``alembic revision`` is run with the ``--autogenerate`` option: - - :param target_metadata: a :class:`sqlalchemy.schema.MetaData` - object, or a sequence of :class:`~sqlalchemy.schema.MetaData` - objects, that will be consulted during autogeneration. - The tables present in each :class:`~sqlalchemy.schema.MetaData` - will be compared against - what is locally available on the target - :class:`~sqlalchemy.engine.Connection` - to produce candidate upgrade/downgrade operations. - :param compare_type: Indicates type comparison behavior during - an autogenerate - operation. Defaults to ``True``, turning on type comparison, which - has good accuracy on most backends. See :ref:`compare_types` - for an example as well as information on other type - comparison options. Set to ``False`` to disable type - comparison. A callable can also be passed to provide custom type - comparison, see :ref:`compare_types` for additional details. - - .. versionchanged:: 1.12.0 The default value of - :paramref:`.EnvironmentContext.configure.compare_type` has been - changed to ``True``. - - .. seealso:: - - :ref:`compare_types` - - :paramref:`.EnvironmentContext.configure.compare_server_default` - - :param compare_server_default: Indicates server default comparison - behavior during - an autogenerate operation. Defaults to ``False`` which disables - server default - comparison. Set to ``True`` to turn on server default comparison, - which has - varied accuracy depending on backend. - - To customize server default comparison behavior, a callable may - be specified - which can filter server default comparisons during an - autogenerate operation. The format of this - callable is:: - - def my_compare_server_default(context, inspected_column, - metadata_column, inspected_default, metadata_default, - rendered_metadata_default): - # return True if the defaults are different, - # False if not, or None to allow the default implementation - # to compare these defaults - return None - - context.configure( - # ... - compare_server_default = my_compare_server_default - ) - - ``inspected_column`` is a dictionary structure as returned by - :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas - ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from - the local model environment. - - A return value of ``None`` allows the default server default - comparison - to proceed. Note that some backends such as PostgreSQL actually - execute - the two defaults on the database side to compare for equivalence. - - .. seealso:: - - :paramref:`.EnvironmentContext.configure.compare_type` - - :param include_name: A callable function which is given - the chance to return ``True`` or ``False`` for any database reflected - object based on its name, including database schema names when - the :paramref:`.EnvironmentContext.configure.include_schemas` flag - is set to ``True``. - - The function accepts the following positional arguments: - - * ``name``: the name of the object, such as schema name or table name.
- Will be ``None`` when indicating the default schema name of the - database connection. - * ``type``: a string describing the type of object; currently - ``"schema"``, ``"table"``, ``"column"``, ``"index"``, - ``"unique_constraint"``, or ``"foreign_key_constraint"`` - * ``parent_names``: a dictionary of "parent" object names, that are - relative to the name being given. Keys in this dictionary may - include: ``"schema_name"``, ``"table_name"`` or - ``"schema_qualified_table_name"``. - - E.g.:: - - def include_name(name, type_, parent_names): - if type_ == "schema": - return name in ["schema_one", "schema_two"] - else: - return True - - context.configure( - # ... - include_schemas = True, - include_name = include_name - ) - - .. seealso:: - - :ref:`autogenerate_include_hooks` - - :paramref:`.EnvironmentContext.configure.include_object` - - :paramref:`.EnvironmentContext.configure.include_schemas` - - - :param include_object: A callable function which is given - the chance to return ``True`` or ``False`` for any object, - indicating if the given object should be considered in the - autogenerate sweep. - - The function accepts the following positional arguments: - - * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such - as a :class:`~sqlalchemy.schema.Table`, - :class:`~sqlalchemy.schema.Column`, - :class:`~sqlalchemy.schema.Index` - :class:`~sqlalchemy.schema.UniqueConstraint`, - or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object - * ``name``: the name of the object. This is typically available - via ``object.name``. - * ``type``: a string describing the type of object; currently - ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``, - or ``"foreign_key_constraint"`` - * ``reflected``: ``True`` if the given object was produced based on - table reflection, ``False`` if it's from a local :class:`.MetaData` - object. - * ``compare_to``: the object being compared against, if available, - else ``None``. - - E.g.:: - - def include_object(object, name, type_, reflected, compare_to): - if (type_ == "column" and - not reflected and - object.info.get("skip_autogenerate", False)): - return False - else: - return True - - context.configure( - # ... - include_object = include_object - ) - - For the use case of omitting specific schemas from a target database - when :paramref:`.EnvironmentContext.configure.include_schemas` is - set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema` - attribute can be checked for each :class:`~sqlalchemy.schema.Table` - object passed to the hook, however it is much more efficient - to filter on schemas before reflection of objects takes place - using the :paramref:`.EnvironmentContext.configure.include_name` - hook. - - .. seealso:: - - :ref:`autogenerate_include_hooks` - - :paramref:`.EnvironmentContext.configure.include_name` - - :paramref:`.EnvironmentContext.configure.include_schemas` - - :param render_as_batch: if True, commands which alter elements - within a table will be placed under a ``with batch_alter_table():`` - directive, so that batch migrations will take place. - - .. seealso:: - - :ref:`batch_migrations` - - :param include_schemas: If True, autogenerate will scan across - all schemas located by the SQLAlchemy - :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names` - method, and include all differences in tables found across all - those schemas. 
When using this option, you may want to also - use the :paramref:`.EnvironmentContext.configure.include_name` - parameter to specify a callable which - can filter the tables/schemas that get included. - - .. seealso:: - - :ref:`autogenerate_include_hooks` - - :paramref:`.EnvironmentContext.configure.include_name` - - :paramref:`.EnvironmentContext.configure.include_object` - - :param render_item: Callable that can be used to override how - any schema item, i.e. column, constraint, type, - etc., is rendered for autogenerate. The callable receives a - string describing the type of object, the object, and - the autogen context. If it returns False, the - default rendering method will be used. If it returns None, - the item will not be rendered in the context of a Table - construct, that is, can be used to skip columns or constraints - within op.create_table():: - - def my_render_column(type_, col, autogen_context): - if type_ == "column" and isinstance(col, MySpecialCol): - return repr(col) - else: - return False - - context.configure( - # ... - render_item = my_render_column - ) - - Available values for the type string include: ``"column"``, - ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``, - ``"type"``, ``"server_default"``. - - .. seealso:: - - :ref:`autogen_render_types` - - :param upgrade_token: When autogenerate completes, the text of the - candidate upgrade operations will be present in this template - variable when ``script.py.mako`` is rendered. Defaults to - ``upgrades``. - :param downgrade_token: When autogenerate completes, the text of the - candidate downgrade operations will be present in this - template variable when ``script.py.mako`` is rendered. Defaults to - ``downgrades``. - - :param alembic_module_prefix: When autogenerate refers to Alembic - :mod:`alembic.operations` constructs, this prefix will be used - (i.e. ``op.create_table``) Defaults to "``op.``". - Can be ``None`` to indicate no prefix. - - :param sqlalchemy_module_prefix: When autogenerate refers to - SQLAlchemy - :class:`~sqlalchemy.schema.Column` or type classes, this prefix - will be used - (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``". - Can be ``None`` to indicate no prefix. - Note that when dialect-specific types are rendered, autogenerate - will render them using the dialect module name, i.e. ``mssql.BIT()``, - ``postgresql.UUID()``. - - :param user_module_prefix: When autogenerate refers to a SQLAlchemy - type (e.g. :class:`.TypeEngine`) where the module name is not - under the ``sqlalchemy`` namespace, this prefix will be used - within autogenerate. If left at its default of - ``None``, the ``__module__`` attribute of the type is used to - render the import module. It's a good practice to set this - and to have all custom types be available from a fixed module space, - in order to future-proof migration files against reorganizations - in modules. - - .. seealso:: - - :ref:`autogen_module_prefix` - - :param process_revision_directives: a callable function that will - be passed a structure representing the end result of an autogenerate - or plain "revision" operation, which can be manipulated to affect - how the ``alembic revision`` command ultimately outputs new - revision scripts. The structure of the callable is:: - - def process_revision_directives(context, revision, directives): - pass - - The ``directives`` parameter is a Python list containing - a single :class:`.MigrationScript` directive, which represents - the revision file to be generated. 
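As a concrete illustration of the ``process_revision_directives`` hook being described here, a widely used recipe from the Alembic cookbook abandons the new revision when autogenerate produced no operations; a sketch::

    def process_revision_directives(context, revision, directives):
        # directives holds a single MigrationScript; emptying the list
        # tells Alembic not to write a revision file at all
        script = directives[0]
        if script.upgrade_ops.is_empty():
            directives[:] = []

The hook would be passed to ``context.configure()`` in ``env.py``, or to ``command.revision()`` as documented earlier.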
This list as well as its - contents may be freely modified to produce any set of commands. - The section :ref:`customizing_revision` shows an example of - doing this. The ``context`` parameter is the - :class:`.MigrationContext` in use, - and ``revision`` is a tuple of revision identifiers representing the - current revision of the database. - - The callable is invoked at all times when the ``--autogenerate`` - option is passed to ``alembic revision``. If ``--autogenerate`` - is not passed, the callable is invoked only if the - ``revision_environment`` variable is set to True in the Alembic - configuration, in which case the given ``directives`` collection - will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps` - collections for ``.upgrade_ops`` and ``.downgrade_ops``. The - ``--autogenerate`` option itself can be inferred by inspecting - ``context.config.cmd_opts.autogenerate``. - - The callable function may optionally be an instance of - a :class:`.Rewriter` object. This is a helper object that - assists in the production of autogenerate-stream rewriter functions. - - .. seealso:: - - :ref:`customizing_revision` - - :ref:`autogen_rewriter` - - :paramref:`.command.revision.process_revision_directives` - - Parameters specific to individual backends: - - :param mssql_batch_separator: The "batch separator" which will - be placed between each statement when generating offline SQL Server - migrations. Defaults to ``GO``. Note this is in addition to the - customary semicolon ``;`` at the end of each statement; SQL Server - considers the "batch separator" to denote the end of an - individual statement execution, and cannot group certain - dependent operations in one step. - :param oracle_batch_separator: The "batch separator" which will - be placed between each statement when generating offline - Oracle migrations. Defaults to ``/``. Oracle doesn't add a - semicolon between statements like most other backends. - - """ - -def execute( - sql: Union[Executable, str], - execution_options: Optional[Dict[str, Any]] = None, -) -> None: - """Execute the given SQL using the current change context. - - The behavior of :meth:`.execute` is the same - as that of :meth:`.Operations.execute`. Please see that - function's documentation for full detail including - caveats and limitations. - - This function requires that a :class:`.MigrationContext` has - first been made available via :meth:`.configure`. - - """ - -def get_bind() -> Connection: - """Return the current 'bind'. - - In "online" mode, this is the - :class:`sqlalchemy.engine.Connection` currently being used - to emit SQL to the database. - - This function requires that a :class:`.MigrationContext` - has first been made available via :meth:`.configure`. - - """ - -def get_context() -> MigrationContext: - """Return the current :class:`.MigrationContext` object. - - If :meth:`.EnvironmentContext.configure` has not been - called yet, raises an exception. - - """ - -def get_head_revision() -> Union[str, Tuple[str, ...], None]: - """Return the hex identifier of the 'head' script revision. - - If the script directory has multiple heads, this - method raises a :class:`.CommandError`; - :meth:`.EnvironmentContext.get_head_revisions` should be preferred. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. seealso:: :meth:`.EnvironmentContext.get_head_revisions` - - """ - -def get_head_revisions() -> Union[str, Tuple[str, ...], None]: - """Return the hex identifier of the 'heads' script revision(s). 
- - This returns a tuple containing the version number of all - heads in the script directory. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - -def get_revision_argument() -> Union[str, Tuple[str, ...], None]: - """Get the 'destination' revision argument. - - This is typically the argument passed to the - ``upgrade`` or ``downgrade`` command. - - If it was specified as ``head``, the actual - version number is returned; if specified - as ``base``, ``None`` is returned. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - -def get_starting_revision_argument() -> Union[str, Tuple[str, ...], None]: - """Return the 'starting revision' argument, - if the revision was passed using ``start:end``. - - This is only meaningful in "offline" mode. - Returns ``None`` if no value is available - or was configured. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - -def get_tag_argument() -> Optional[str]: - """Return the value passed for the ``--tag`` argument, if any. - - The ``--tag`` argument is not used directly by Alembic, - but is available for custom ``env.py`` configurations that - wish to use it; particularly for offline generation scripts - that wish to generate tagged filenames. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. seealso:: - - :meth:`.EnvironmentContext.get_x_argument` - a newer and more - open ended system of extending ``env.py`` scripts via the command - line. - - """ - -@overload -def get_x_argument(as_dictionary: Literal[False]) -> List[str]: ... -@overload -def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: ... -@overload -def get_x_argument( - as_dictionary: bool = ..., -) -> Union[List[str], Dict[str, str]]: - """Return the value(s) passed for the ``-x`` argument, if any. - - The ``-x`` argument is an open ended flag that allows any user-defined - value or values to be passed on the command line, then available - here for consumption by a custom ``env.py`` script. - - The return value is a list, returned directly from the ``argparse`` - structure. If ``as_dictionary=True`` is passed, the ``x`` arguments - are parsed using ``key=value`` format into a dictionary that is - then returned. If there is no ``=`` in the argument, value is an empty - string. - - .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when - arguments are passed without the ``=`` symbol. - - For example, to support passing a database URL on the command line, - the standard ``env.py`` script can be modified like this:: - - cmd_line_url = context.get_x_argument( - as_dictionary=True).get('dbname') - if cmd_line_url: - engine = create_engine(cmd_line_url) - else: - engine = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - This then takes effect by running the ``alembic`` script as:: - - alembic -x dbname=postgresql://user:pass@host/dbname upgrade head - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. seealso:: - - :meth:`.EnvironmentContext.get_tag_argument` - - :attr:`.Config.cmd_opts` - - """ - -def is_offline_mode() -> bool: - """Return True if the current migrations environment - is running in "offline mode". - - This is ``True`` or ``False`` depending - on the ``--sql`` flag passed. 
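``is_offline_mode()`` is what the standard ``env.py`` template uses to dispatch at module level; a sketch, where the two run functions are hypothetical names an ``env.py`` would define::

    from alembic import context

    def run_migrations_offline() -> None:
        ...  # would call context.configure(url=...) and emit SQL output

    def run_migrations_online() -> None:
        ...  # would call context.configure(connection=...) against a live engine

    if context.is_offline_mode():
        run_migrations_offline()
    else:
        run_migrations_online()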
- - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - -def is_transactional_ddl() -> bool: - """Return True if the context is configured to expect a - transactional DDL capable backend. - - This defaults to the type of database in use, and - can be overridden by the ``transactional_ddl`` argument - to :meth:`.configure` - - This function requires that a :class:`.MigrationContext` - has first been made available via :meth:`.configure`. - - """ - -def run_migrations(**kw: Any) -> None: - """Run migrations as determined by the current command line - configuration - as well as versioning information present (or not) in the current - database connection (if one is present). - - The function accepts optional ``**kw`` arguments. If these are - passed, they are sent directly to the ``upgrade()`` and - ``downgrade()`` - functions within each target revision file. By modifying the - ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()`` - functions accept arguments, parameters can be passed here so that - contextual information, usually information to identify a particular - database in use, can be passed from a custom ``env.py`` script - to the migration functions. - - This function requires that a :class:`.MigrationContext` has - first been made available via :meth:`.configure`. - - """ - -script: ScriptDirectory - -def static_output(text: str) -> None: - """Emit text directly to the "offline" SQL stream. - - Typically this is for emitting comments that - start with --. The statement is not treated - as a SQL execution, no ; or batch separator - is added, etc. - - """ diff --git a/myenv/Lib/site-packages/alembic/ddl/__init__.py b/myenv/Lib/site-packages/alembic/ddl/__init__.py deleted file mode 100644 index f2f72b3..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/__init__.py +++ /dev/null @@ -1,6 +0,0 @@ -from . import mssql -from . import mysql -from . import oracle -from . import postgresql -from . import sqlite -from .impl import DefaultImpl as DefaultImpl diff --git a/myenv/Lib/site-packages/alembic/ddl/_autogen.py b/myenv/Lib/site-packages/alembic/ddl/_autogen.py deleted file mode 100644 index 74715b1..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/_autogen.py +++ /dev/null @@ -1,329 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -from typing import Any -from typing import ClassVar -from typing import Dict -from typing import Generic -from typing import NamedTuple -from typing import Optional -from typing import Sequence -from typing import Tuple -from typing import Type -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -from sqlalchemy.sql.schema import Constraint -from sqlalchemy.sql.schema import ForeignKeyConstraint -from sqlalchemy.sql.schema import Index -from sqlalchemy.sql.schema import UniqueConstraint -from typing_extensions import TypeGuard - -from .. 
import util -from ..util import sqla_compat - -if TYPE_CHECKING: - from typing import Literal - - from alembic.autogenerate.api import AutogenContext - from alembic.ddl.impl import DefaultImpl - -CompareConstraintType = Union[Constraint, Index] - -_C = TypeVar("_C", bound=CompareConstraintType) - -_clsreg: Dict[str, Type[_constraint_sig]] = {} - - -class ComparisonResult(NamedTuple): - status: Literal["equal", "different", "skip"] - message: str - - @property - def is_equal(self) -> bool: - return self.status == "equal" - - @property - def is_different(self) -> bool: - return self.status == "different" - - @property - def is_skip(self) -> bool: - return self.status == "skip" - - @classmethod - def Equal(cls) -> ComparisonResult: - """the constraints are equal.""" - return cls("equal", "The two constraints are equal") - - @classmethod - def Different(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: - """the constraints are different for the provided reason(s).""" - return cls("different", ", ".join(util.to_list(reason))) - - @classmethod - def Skip(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: - """the constraint cannot be compared for the provided reason(s). - - The message is logged, but the constraints will be otherwise - considered equal, meaning that no migration command will be - generated. - """ - return cls("skip", ", ".join(util.to_list(reason))) - - -class _constraint_sig(Generic[_C]): - const: _C - - _sig: Tuple[Any, ...] - name: Optional[sqla_compat._ConstraintNameDefined] - - impl: DefaultImpl - - _is_index: ClassVar[bool] = False - _is_fk: ClassVar[bool] = False - _is_uq: ClassVar[bool] = False - - _is_metadata: bool - - def __init_subclass__(cls) -> None: - cls._register() - - @classmethod - def _register(cls): - raise NotImplementedError() - - def __init__( - self, is_metadata: bool, impl: DefaultImpl, const: _C - ) -> None: - raise NotImplementedError() - - def compare_to_reflected( - self, other: _constraint_sig[Any] - ) -> ComparisonResult: - assert self.impl is other.impl - assert self._is_metadata - assert not other._is_metadata - - return self._compare_to_reflected(other) - - def _compare_to_reflected( - self, other: _constraint_sig[_C] - ) -> ComparisonResult: - raise NotImplementedError() - - @classmethod - def from_constraint( - cls, is_metadata: bool, impl: DefaultImpl, constraint: _C - ) -> _constraint_sig[_C]: - # these could be cached by constraint/impl, however, if the - # constraint is modified in place, then the sig is wrong. the mysql - # impl currently does this, and if we fixed that we can't be sure - # someone else might do it too, so play it safe. 
- sig = _clsreg[constraint.__visit_name__](is_metadata, impl, constraint) - return sig - - def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: - return sqla_compat._get_constraint_final_name( - self.const, context.dialect - ) - - @util.memoized_property - def is_named(self): - return sqla_compat._constraint_is_named(self.const, self.impl.dialect) - - @util.memoized_property - def unnamed(self) -> Tuple[Any, ...]: - return self._sig - - @util.memoized_property - def unnamed_no_options(self) -> Tuple[Any, ...]: - raise NotImplementedError() - - @util.memoized_property - def _full_sig(self) -> Tuple[Any, ...]: - return (self.name,) + self.unnamed - - def __eq__(self, other) -> bool: - return self._full_sig == other._full_sig - - def __ne__(self, other) -> bool: - return self._full_sig != other._full_sig - - def __hash__(self) -> int: - return hash(self._full_sig) - - -class _uq_constraint_sig(_constraint_sig[UniqueConstraint]): - _is_uq = True - - @classmethod - def _register(cls) -> None: - _clsreg["unique_constraint"] = cls - - is_unique = True - - def __init__( - self, - is_metadata: bool, - impl: DefaultImpl, - const: UniqueConstraint, - ) -> None: - self.impl = impl - self.const = const - self.name = sqla_compat.constraint_name_or_none(const.name) - self._sig = tuple(sorted([col.name for col in const.columns])) - self._is_metadata = is_metadata - - @property - def column_names(self) -> Tuple[str, ...]: - return tuple([col.name for col in self.const.columns]) - - def _compare_to_reflected( - self, other: _constraint_sig[_C] - ) -> ComparisonResult: - assert self._is_metadata - metadata_obj = self - conn_obj = other - - assert is_uq_sig(conn_obj) - return self.impl.compare_unique_constraint( - metadata_obj.const, conn_obj.const - ) - - -class _ix_constraint_sig(_constraint_sig[Index]): - _is_index = True - - name: sqla_compat._ConstraintName - - @classmethod - def _register(cls) -> None: - _clsreg["index"] = cls - - def __init__( - self, is_metadata: bool, impl: DefaultImpl, const: Index - ) -> None: - self.impl = impl - self.const = const - self.name = const.name - self.is_unique = bool(const.unique) - self._is_metadata = is_metadata - - def _compare_to_reflected( - self, other: _constraint_sig[_C] - ) -> ComparisonResult: - assert self._is_metadata - metadata_obj = self - conn_obj = other - - assert is_index_sig(conn_obj) - return self.impl.compare_indexes(metadata_obj.const, conn_obj.const) - - @util.memoized_property - def has_expressions(self): - return sqla_compat.is_expression_index(self.const) - - @util.memoized_property - def column_names(self) -> Tuple[str, ...]: - return tuple([col.name for col in self.const.columns]) - - @util.memoized_property - def column_names_optional(self) -> Tuple[Optional[str], ...]: - return tuple( - [getattr(col, "name", None) for col in self.const.expressions] - ) - - @util.memoized_property - def is_named(self): - return True - - @util.memoized_property - def unnamed(self): - return (self.is_unique,) + self.column_names_optional - - -class _fk_constraint_sig(_constraint_sig[ForeignKeyConstraint]): - _is_fk = True - - @classmethod - def _register(cls) -> None: - _clsreg["foreign_key_constraint"] = cls - - def __init__( - self, - is_metadata: bool, - impl: DefaultImpl, - const: ForeignKeyConstraint, - ) -> None: - self._is_metadata = is_metadata - - self.impl = impl - self.const = const - - self.name = sqla_compat.constraint_name_or_none(const.name) - - ( - self.source_schema, - self.source_table, - self.source_columns, - 
self.target_schema, - self.target_table, - self.target_columns, - onupdate, - ondelete, - deferrable, - initially, - ) = sqla_compat._fk_spec(const) - - self._sig: Tuple[Any, ...] = ( - self.source_schema, - self.source_table, - tuple(self.source_columns), - self.target_schema, - self.target_table, - tuple(self.target_columns), - ) + ( - ( - (None if onupdate.lower() == "no action" else onupdate.lower()) - if onupdate - else None - ), - ( - (None if ondelete.lower() == "no action" else ondelete.lower()) - if ondelete - else None - ), - # convert initially + deferrable into one three-state value - ( - "initially_deferrable" - if initially and initially.lower() == "deferred" - else "deferrable" if deferrable else "not deferrable" - ), - ) - - @util.memoized_property - def unnamed_no_options(self): - return ( - self.source_schema, - self.source_table, - tuple(self.source_columns), - self.target_schema, - self.target_table, - tuple(self.target_columns), - ) - - -def is_index_sig(sig: _constraint_sig) -> TypeGuard[_ix_constraint_sig]: - return sig._is_index - - -def is_uq_sig(sig: _constraint_sig) -> TypeGuard[_uq_constraint_sig]: - return sig._is_uq - - -def is_fk_sig(sig: _constraint_sig) -> TypeGuard[_fk_constraint_sig]: - return sig._is_fk diff --git a/myenv/Lib/site-packages/alembic/ddl/base.py b/myenv/Lib/site-packages/alembic/ddl/base.py deleted file mode 100644 index 690c153..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/base.py +++ /dev/null @@ -1,336 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import functools -from typing import Optional -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import exc -from sqlalchemy import Integer -from sqlalchemy import types as sqltypes -from sqlalchemy.ext.compiler import compiles -from sqlalchemy.schema import Column -from sqlalchemy.schema import DDLElement -from sqlalchemy.sql.elements import quoted_name - -from ..util.sqla_compat import _columns_for_constraint # noqa -from ..util.sqla_compat import _find_columns # noqa -from ..util.sqla_compat import _fk_spec # noqa -from ..util.sqla_compat import _is_type_bound # noqa -from ..util.sqla_compat import _table_for_constraint # noqa - -if TYPE_CHECKING: - from typing import Any - - from sqlalchemy.sql.compiler import Compiled - from sqlalchemy.sql.compiler import DDLCompiler - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.functions import Function - from sqlalchemy.sql.schema import FetchedValue - from sqlalchemy.sql.type_api import TypeEngine - - from .impl import DefaultImpl - from ..util.sqla_compat import Computed - from ..util.sqla_compat import Identity - -_ServerDefault = Union["TextClause", "FetchedValue", "Function[Any]", str] - - -class AlterTable(DDLElement): - """Represent an ALTER TABLE statement. - - Only the string name and optional schema name of the table - is required, not a full Table object. 
- - """ - - def __init__( - self, - table_name: str, - schema: Optional[Union[quoted_name, str]] = None, - ) -> None: - self.table_name = table_name - self.schema = schema - - -class RenameTable(AlterTable): - def __init__( - self, - old_table_name: str, - new_table_name: Union[quoted_name, str], - schema: Optional[Union[quoted_name, str]] = None, - ) -> None: - super().__init__(old_table_name, schema=schema) - self.new_table_name = new_table_name - - -class AlterColumn(AlterTable): - def __init__( - self, - name: str, - column_name: str, - schema: Optional[str] = None, - existing_type: Optional[TypeEngine] = None, - existing_nullable: Optional[bool] = None, - existing_server_default: Optional[_ServerDefault] = None, - existing_comment: Optional[str] = None, - ) -> None: - super().__init__(name, schema=schema) - self.column_name = column_name - self.existing_type = ( - sqltypes.to_instance(existing_type) - if existing_type is not None - else None - ) - self.existing_nullable = existing_nullable - self.existing_server_default = existing_server_default - self.existing_comment = existing_comment - - -class ColumnNullable(AlterColumn): - def __init__( - self, name: str, column_name: str, nullable: bool, **kw - ) -> None: - super().__init__(name, column_name, **kw) - self.nullable = nullable - - -class ColumnType(AlterColumn): - def __init__( - self, name: str, column_name: str, type_: TypeEngine, **kw - ) -> None: - super().__init__(name, column_name, **kw) - self.type_ = sqltypes.to_instance(type_) - - -class ColumnName(AlterColumn): - def __init__( - self, name: str, column_name: str, newname: str, **kw - ) -> None: - super().__init__(name, column_name, **kw) - self.newname = newname - - -class ColumnDefault(AlterColumn): - def __init__( - self, - name: str, - column_name: str, - default: Optional[_ServerDefault], - **kw, - ) -> None: - super().__init__(name, column_name, **kw) - self.default = default - - -class ComputedColumnDefault(AlterColumn): - def __init__( - self, name: str, column_name: str, default: Optional[Computed], **kw - ) -> None: - super().__init__(name, column_name, **kw) - self.default = default - - -class IdentityColumnDefault(AlterColumn): - def __init__( - self, - name: str, - column_name: str, - default: Optional[Identity], - impl: DefaultImpl, - **kw, - ) -> None: - super().__init__(name, column_name, **kw) - self.default = default - self.impl = impl - - -class AddColumn(AlterTable): - def __init__( - self, - name: str, - column: Column[Any], - schema: Optional[Union[quoted_name, str]] = None, - ) -> None: - super().__init__(name, schema=schema) - self.column = column - - -class DropColumn(AlterTable): - def __init__( - self, name: str, column: Column[Any], schema: Optional[str] = None - ) -> None: - super().__init__(name, schema=schema) - self.column = column - - -class ColumnComment(AlterColumn): - def __init__( - self, name: str, column_name: str, comment: Optional[str], **kw - ) -> None: - super().__init__(name, column_name, **kw) - self.comment = comment - - -@compiles(RenameTable) # type: ignore[misc] -def visit_rename_table( - element: RenameTable, compiler: DDLCompiler, **kw -) -> str: - return "%s RENAME TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_table_name(compiler, element.new_table_name, element.schema), - ) - - -@compiles(AddColumn) # type: ignore[misc] -def visit_add_column(element: AddColumn, compiler: DDLCompiler, **kw) -> str: - return "%s %s" % ( - alter_table(compiler, element.table_name, element.schema), - 
add_column(compiler, element.column, **kw), - ) - - -@compiles(DropColumn) # type: ignore[misc] -def visit_drop_column(element: DropColumn, compiler: DDLCompiler, **kw) -> str: - return "%s %s" % ( - alter_table(compiler, element.table_name, element.schema), - drop_column(compiler, element.column.name, **kw), - ) - - -@compiles(ColumnNullable) # type: ignore[misc] -def visit_column_nullable( - element: ColumnNullable, compiler: DDLCompiler, **kw -) -> str: - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "DROP NOT NULL" if element.nullable else "SET NOT NULL", - ) - - -@compiles(ColumnType) # type: ignore[misc] -def visit_column_type(element: ColumnType, compiler: DDLCompiler, **kw) -> str: - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "TYPE %s" % format_type(compiler, element.type_), - ) - - -@compiles(ColumnName) # type: ignore[misc] -def visit_column_name(element: ColumnName, compiler: DDLCompiler, **kw) -> str: - return "%s RENAME %s TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname), - ) - - -@compiles(ColumnDefault) # type: ignore[misc] -def visit_column_default( - element: ColumnDefault, compiler: DDLCompiler, **kw -) -> str: - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - ( - "SET DEFAULT %s" % format_server_default(compiler, element.default) - if element.default is not None - else "DROP DEFAULT" - ), - ) - - -@compiles(ComputedColumnDefault) # type: ignore[misc] -def visit_computed_column( - element: ComputedColumnDefault, compiler: DDLCompiler, **kw -): - raise exc.CompileError( - 'Adding or removing a "computed" construct, e.g. GENERATED ' - "ALWAYS AS, to or from an existing column is not supported." - ) - - -@compiles(IdentityColumnDefault) # type: ignore[misc] -def visit_identity_column( - element: IdentityColumnDefault, compiler: DDLCompiler, **kw -): - raise exc.CompileError( - 'Adding, removing or modifying an "identity" construct, ' - "e.g. GENERATED AS IDENTITY, to or from an existing " - "column is not supported in this dialect." - ) - - -def quote_dotted( - name: Union[quoted_name, str], quote: functools.partial -) -> Union[quoted_name, str]: - """quote the elements of a dotted name""" - - if isinstance(name, quoted_name): - return quote(name) - result = ".".join([quote(x) for x in name.split(".")]) - return result - - -def format_table_name( - compiler: Compiled, - name: Union[quoted_name, str], - schema: Optional[Union[quoted_name, str]], -) -> Union[quoted_name, str]: - quote = functools.partial(compiler.preparer.quote) - if schema: - return quote_dotted(schema, quote) + "." 
+ quote(name) - else: - return quote(name) - - -def format_column_name( - compiler: DDLCompiler, name: Optional[Union[quoted_name, str]] -) -> Union[quoted_name, str]: - return compiler.preparer.quote(name) # type: ignore[arg-type] - - -def format_server_default( - compiler: DDLCompiler, - default: Optional[_ServerDefault], -) -> str: - return compiler.get_column_default_string( - Column("x", Integer, server_default=default) - ) - - -def format_type(compiler: DDLCompiler, type_: TypeEngine) -> str: - return compiler.dialect.type_compiler.process(type_) - - -def alter_table( - compiler: DDLCompiler, - name: str, - schema: Optional[str], -) -> str: - return "ALTER TABLE %s" % format_table_name(compiler, name, schema) - - -def drop_column(compiler: DDLCompiler, name: str, **kw) -> str: - return "DROP COLUMN %s" % format_column_name(compiler, name) - - -def alter_column(compiler: DDLCompiler, name: str) -> str: - return "ALTER COLUMN %s" % format_column_name(compiler, name) - - -def add_column(compiler: DDLCompiler, column: Column[Any], **kw) -> str: - text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) - - const = " ".join( - compiler.process(constraint) for constraint in column.constraints - ) - if const: - text += " " + const - - return text diff --git a/myenv/Lib/site-packages/alembic/ddl/impl.py b/myenv/Lib/site-packages/alembic/ddl/impl.py deleted file mode 100644 index 2574688..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/impl.py +++ /dev/null @@ -1,849 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import logging -import re -from typing import Any -from typing import Callable -from typing import Dict -from typing import Iterable -from typing import List -from typing import Mapping -from typing import NamedTuple -from typing import Optional -from typing import Sequence -from typing import Set -from typing import Tuple -from typing import Type -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import cast -from sqlalchemy import schema -from sqlalchemy import text - -from . import _autogen -from . import base -from ._autogen import _constraint_sig as _constraint_sig -from ._autogen import ComparisonResult as ComparisonResult -from .. 
import util -from ..util import sqla_compat - -if TYPE_CHECKING: - from typing import Literal - from typing import TextIO - - from sqlalchemy.engine import Connection - from sqlalchemy.engine import Dialect - from sqlalchemy.engine.cursor import CursorResult - from sqlalchemy.engine.reflection import Inspector - from sqlalchemy.sql import ClauseElement - from sqlalchemy.sql import Executable - from sqlalchemy.sql.elements import ColumnElement - from sqlalchemy.sql.elements import quoted_name - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Constraint - from sqlalchemy.sql.schema import ForeignKeyConstraint - from sqlalchemy.sql.schema import Index - from sqlalchemy.sql.schema import Table - from sqlalchemy.sql.schema import UniqueConstraint - from sqlalchemy.sql.selectable import TableClause - from sqlalchemy.sql.type_api import TypeEngine - - from .base import _ServerDefault - from ..autogenerate.api import AutogenContext - from ..operations.batch import ApplyBatchImpl - from ..operations.batch import BatchOperationsImpl - -log = logging.getLogger(__name__) - - -class ImplMeta(type): - def __init__( - cls, - classname: str, - bases: Tuple[Type[DefaultImpl]], - dict_: Dict[str, Any], - ): - newtype = type.__init__(cls, classname, bases, dict_) - if "__dialect__" in dict_: - _impls[dict_["__dialect__"]] = cls # type: ignore[assignment] - return newtype - - -_impls: Dict[str, Type[DefaultImpl]] = {} - - -class DefaultImpl(metaclass=ImplMeta): - """Provide the entrypoint for major migration operations, - including database-specific behavioral variances. - - While individual SQL/DDL constructs already provide - for database-specific implementations, variances here - allow for entirely different sequences of operations - to take place for a particular migration, such as - SQL Server's special 'IDENTITY INSERT' step for - bulk inserts. - - """ - - __dialect__ = "default" - - transactional_ddl = False - command_terminator = ";" - type_synonyms: Tuple[Set[str], ...] = ({"NUMERIC", "DECIMAL"},) - type_arg_extract: Sequence[str] = () - # These attributes are deprecated in SQLAlchemy via #10247. They need to - # be ignored to support older version that did not use dialect kwargs. - # They only apply to Oracle and are replaced by oracle_order, - # oracle_on_null - identity_attrs_ignore: Tuple[str, ...] = ("order", "on_null") - - def __init__( - self, - dialect: Dialect, - connection: Optional[Connection], - as_sql: bool, - transactional_ddl: Optional[bool], - output_buffer: Optional[TextIO], - context_opts: Dict[str, Any], - ) -> None: - self.dialect = dialect - self.connection = connection - self.as_sql = as_sql - self.literal_binds = context_opts.get("literal_binds", False) - - self.output_buffer = output_buffer - self.memo: dict = {} - self.context_opts = context_opts - if transactional_ddl is not None: - self.transactional_ddl = transactional_ddl - - if self.literal_binds: - if not self.as_sql: - raise util.CommandError( - "Can't use literal_binds setting without as_sql mode" - ) - - @classmethod - def get_by_dialect(cls, dialect: Dialect) -> Type[DefaultImpl]: - return _impls[dialect.name] - - def static_output(self, text: str) -> None: - assert self.output_buffer is not None - self.output_buffer.write(text + "\n\n") - self.output_buffer.flush() - - def requires_recreate_in_batch( - self, batch_op: BatchOperationsImpl - ) -> bool: - """Return True if the given :class:`.BatchOperationsImpl` - would need the table to be recreated and copied in order to - proceed. 
- - Normally, only returns True on SQLite when operations other - than add_column are present. - - """ - return False - - def prep_table_for_batch( - self, batch_impl: ApplyBatchImpl, table: Table - ) -> None: - """perform any operations needed on a table before a new - one is created to replace it in batch mode. - - the PG dialect uses this to drop constraints on the table - before the new one uses those same names. - - """ - - @property - def bind(self) -> Optional[Connection]: - return self.connection - - def _exec( - self, - construct: Union[Executable, str], - execution_options: Optional[Mapping[str, Any]] = None, - multiparams: Optional[Sequence[Mapping[str, Any]]] = None, - params: Mapping[str, Any] = util.immutabledict(), - ) -> Optional[CursorResult]: - if isinstance(construct, str): - construct = text(construct) - if self.as_sql: - if multiparams is not None or params: - raise TypeError("SQL parameters not allowed with as_sql") - - compile_kw: dict[str, Any] - if self.literal_binds and not isinstance( - construct, schema.DDLElement - ): - compile_kw = dict(compile_kwargs={"literal_binds": True}) - else: - compile_kw = {} - - if TYPE_CHECKING: - assert isinstance(construct, ClauseElement) - compiled = construct.compile(dialect=self.dialect, **compile_kw) - self.static_output( - str(compiled).replace("\t", " ").strip() - + self.command_terminator - ) - return None - else: - conn = self.connection - assert conn is not None - if execution_options: - conn = conn.execution_options(**execution_options) - - if params and multiparams is not None: - raise TypeError( - "Can't send params and multiparams at the same time" - ) - - if multiparams: - return conn.execute(construct, multiparams) - else: - return conn.execute(construct, params) - - def execute( - self, - sql: Union[Executable, str], - execution_options: Optional[dict[str, Any]] = None, - ) -> None: - self._exec(sql, execution_options) - - def alter_column( - self, - table_name: str, - column_name: str, - nullable: Optional[bool] = None, - server_default: Union[_ServerDefault, Literal[False]] = False, - name: Optional[str] = None, - type_: Optional[TypeEngine] = None, - schema: Optional[str] = None, - autoincrement: Optional[bool] = None, - comment: Optional[Union[str, Literal[False]]] = False, - existing_comment: Optional[str] = None, - existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, - existing_nullable: Optional[bool] = None, - existing_autoincrement: Optional[bool] = None, - **kw: Any, - ) -> None: - if autoincrement is not None or existing_autoincrement is not None: - util.warn( - "autoincrement and existing_autoincrement " - "only make sense for MySQL", - stacklevel=3, - ) - if nullable is not None: - self._exec( - base.ColumnNullable( - table_name, - column_name, - nullable, - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_comment=existing_comment, - ) - ) - if server_default is not False: - kw = {} - cls_: Type[ - Union[ - base.ComputedColumnDefault, - base.IdentityColumnDefault, - base.ColumnDefault, - ] - ] - if sqla_compat._server_default_is_computed( - server_default, existing_server_default - ): - cls_ = base.ComputedColumnDefault - elif sqla_compat._server_default_is_identity( - server_default, existing_server_default - ): - cls_ = base.IdentityColumnDefault - kw["impl"] = self - else: - cls_ = base.ColumnDefault - self._exec( - cls_( - table_name, - column_name, - 
server_default, # type:ignore[arg-type] - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_comment=existing_comment, - **kw, - ) - ) - if type_ is not None: - self._exec( - base.ColumnType( - table_name, - column_name, - type_, - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_comment=existing_comment, - ) - ) - - if comment is not False: - self._exec( - base.ColumnComment( - table_name, - column_name, - comment, - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_comment=existing_comment, - ) - ) - - # do the new name last ;) - if name is not None: - self._exec( - base.ColumnName( - table_name, - column_name, - name, - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - ) - ) - - def add_column( - self, - table_name: str, - column: Column[Any], - schema: Optional[Union[str, quoted_name]] = None, - ) -> None: - self._exec(base.AddColumn(table_name, column, schema=schema)) - - def drop_column( - self, - table_name: str, - column: Column[Any], - schema: Optional[str] = None, - **kw, - ) -> None: - self._exec(base.DropColumn(table_name, column, schema=schema)) - - def add_constraint(self, const: Any) -> None: - if const._create_rule is None or const._create_rule(self): - self._exec(schema.AddConstraint(const)) - - def drop_constraint(self, const: Constraint) -> None: - self._exec(schema.DropConstraint(const)) - - def rename_table( - self, - old_table_name: str, - new_table_name: Union[str, quoted_name], - schema: Optional[Union[str, quoted_name]] = None, - ) -> None: - self._exec( - base.RenameTable(old_table_name, new_table_name, schema=schema) - ) - - def create_table(self, table: Table, **kw: Any) -> None: - table.dispatch.before_create( - table, self.connection, checkfirst=False, _ddl_runner=self - ) - self._exec(schema.CreateTable(table, **kw)) - table.dispatch.after_create( - table, self.connection, checkfirst=False, _ddl_runner=self - ) - for index in table.indexes: - self._exec(schema.CreateIndex(index)) - - with_comment = ( - self.dialect.supports_comments and not self.dialect.inline_comments - ) - comment = table.comment - if comment and with_comment: - self.create_table_comment(table) - - for column in table.columns: - comment = column.comment - if comment and with_comment: - self.create_column_comment(column) - - def drop_table(self, table: Table, **kw: Any) -> None: - table.dispatch.before_drop( - table, self.connection, checkfirst=False, _ddl_runner=self - ) - self._exec(schema.DropTable(table, **kw)) - table.dispatch.after_drop( - table, self.connection, checkfirst=False, _ddl_runner=self - ) - - def create_index(self, index: Index, **kw: Any) -> None: - self._exec(schema.CreateIndex(index, **kw)) - - def create_table_comment(self, table: Table) -> None: - self._exec(schema.SetTableComment(table)) - - def drop_table_comment(self, table: Table) -> None: - self._exec(schema.DropTableComment(table)) - - def create_column_comment(self, column: ColumnElement[Any]) -> None: - self._exec(schema.SetColumnComment(column)) - - def drop_index(self, index: Index, **kw: Any) -> None: - self._exec(schema.DropIndex(index, **kw)) - - def bulk_insert( - self, - table: Union[TableClause, Table], - rows: List[dict], - 
multiinsert: bool = True, - ) -> None: - if not isinstance(rows, list): - raise TypeError("List expected") - elif rows and not isinstance(rows[0], dict): - raise TypeError("List of dictionaries expected") - if self.as_sql: - for row in rows: - self._exec( - sqla_compat._insert_inline(table).values( - **{ - k: ( - sqla_compat._literal_bindparam( - k, v, type_=table.c[k].type - ) - if not isinstance( - v, sqla_compat._literal_bindparam - ) - else v - ) - for k, v in row.items() - } - ) - ) - else: - if rows: - if multiinsert: - self._exec( - sqla_compat._insert_inline(table), multiparams=rows - ) - else: - for row in rows: - self._exec( - sqla_compat._insert_inline(table).values(**row) - ) - - def _tokenize_column_type(self, column: Column) -> Params: - definition: str - definition = self.dialect.type_compiler.process(column.type).lower() - - # tokenize the SQLAlchemy-generated version of a type, so that - # the two can be compared. - # - # examples: - # NUMERIC(10, 5) - # TIMESTAMP WITH TIMEZONE - # INTEGER UNSIGNED - # INTEGER (10) UNSIGNED - # INTEGER(10) UNSIGNED - # varchar character set utf8 - # - - tokens: List[str] = re.findall(r"[\w\-_]+|\(.+?\)", definition) - - term_tokens: List[str] = [] - paren_term = None - - for token in tokens: - if re.match(r"^\(.*\)$", token): - paren_term = token - else: - term_tokens.append(token) - - params = Params(term_tokens[0], term_tokens[1:], [], {}) - - if paren_term: - term: str - for term in re.findall("[^(),]+", paren_term): - if "=" in term: - key, val = term.split("=") - params.kwargs[key.strip()] = val.strip() - else: - params.args.append(term.strip()) - - return params - - def _column_types_match( - self, inspector_params: Params, metadata_params: Params - ) -> bool: - if inspector_params.token0 == metadata_params.token0: - return True - - synonyms = [{t.lower() for t in batch} for batch in self.type_synonyms] - inspector_all_terms = " ".join( - [inspector_params.token0] + inspector_params.tokens - ) - metadata_all_terms = " ".join( - [metadata_params.token0] + metadata_params.tokens - ) - - for batch in synonyms: - if {inspector_all_terms, metadata_all_terms}.issubset(batch) or { - inspector_params.token0, - metadata_params.token0, - }.issubset(batch): - return True - return False - - def _column_args_match( - self, inspected_params: Params, meta_params: Params - ) -> bool: - """We want to compare column parameters. However, we only want - to compare parameters that are set. If they both have `collation`, - we want to make sure they are the same. However, if only one - specifies it, dont flag it for being less specific - """ - - if ( - len(meta_params.tokens) == len(inspected_params.tokens) - and meta_params.tokens != inspected_params.tokens - ): - return False - - if ( - len(meta_params.args) == len(inspected_params.args) - and meta_params.args != inspected_params.args - ): - return False - - insp = " ".join(inspected_params.tokens).lower() - meta = " ".join(meta_params.tokens).lower() - - for reg in self.type_arg_extract: - mi = re.search(reg, insp) - mm = re.search(reg, meta) - - if mi and mm and mi.group(1) != mm.group(1): - return False - - return True - - def compare_type( - self, inspector_column: Column[Any], metadata_column: Column - ) -> bool: - """Returns True if there ARE differences between the types of the two - columns. 
Takes impl.type_synonyms into account between retrospected - and metadata types - """ - inspector_params = self._tokenize_column_type(inspector_column) - metadata_params = self._tokenize_column_type(metadata_column) - - if not self._column_types_match(inspector_params, metadata_params): - return True - if not self._column_args_match(inspector_params, metadata_params): - return True - return False - - def compare_server_default( - self, - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_inspector_default, - ): - return rendered_inspector_default != rendered_metadata_default - - def correct_for_autogen_constraints( - self, - conn_uniques: Set[UniqueConstraint], - conn_indexes: Set[Index], - metadata_unique_constraints: Set[UniqueConstraint], - metadata_indexes: Set[Index], - ) -> None: - pass - - def cast_for_batch_migrate(self, existing, existing_transfer, new_type): - if existing.type._type_affinity is not new_type._type_affinity: - existing_transfer["expr"] = cast( - existing_transfer["expr"], new_type - ) - - def render_ddl_sql_expr( - self, expr: ClauseElement, is_server_default: bool = False, **kw: Any - ) -> str: - """Render a SQL expression that is typically a server default, - index expression, etc. - - """ - - compile_kw = {"literal_binds": True, "include_table": False} - - return str( - expr.compile(dialect=self.dialect, compile_kwargs=compile_kw) - ) - - def _compat_autogen_column_reflect(self, inspector: Inspector) -> Callable: - return self.autogen_column_reflect - - def correct_for_autogen_foreignkeys( - self, - conn_fks: Set[ForeignKeyConstraint], - metadata_fks: Set[ForeignKeyConstraint], - ) -> None: - pass - - def autogen_column_reflect(self, inspector, table, column_info): - """A hook that is attached to the 'column_reflect' event for when - a Table is reflected from the database during the autogenerate - process. - - Dialects can elect to modify the information gathered here. - - """ - - def start_migrations(self) -> None: - """A hook called when :meth:`.EnvironmentContext.run_migrations` - is called. - - Implementations can set up per-migration-run state here. - - """ - - def emit_begin(self) -> None: - """Emit the string ``BEGIN``, or the backend-specific - equivalent, on the current connection context. - - This is used in offline mode and typically - via :meth:`.EnvironmentContext.begin_transaction`. - - """ - self.static_output("BEGIN" + self.command_terminator) - - def emit_commit(self) -> None: - """Emit the string ``COMMIT``, or the backend-specific - equivalent, on the current connection context. - - This is used in offline mode and typically - via :meth:`.EnvironmentContext.begin_transaction`. - - """ - self.static_output("COMMIT" + self.command_terminator) - - def render_type( - self, type_obj: TypeEngine, autogen_context: AutogenContext - ) -> Union[str, Literal[False]]: - return False - - def _compare_identity_default(self, metadata_identity, inspector_identity): - # ignored contains the attributes that were not considered - # because assumed to their default values in the db. 
- diff, ignored = _compare_identity_options( - metadata_identity, - inspector_identity, - sqla_compat.Identity(), - skip={"always"}, - ) - - meta_always = getattr(metadata_identity, "always", None) - inspector_always = getattr(inspector_identity, "always", None) - # None and False are the same in this comparison - if bool(meta_always) != bool(inspector_always): - diff.add("always") - - diff.difference_update(self.identity_attrs_ignore) - - # returns 3 values: - return ( - # different identity attributes - diff, - # ignored identity attributes - ignored, - # if the two identity should be considered different - bool(diff) or bool(metadata_identity) != bool(inspector_identity), - ) - - def _compare_index_unique( - self, metadata_index: Index, reflected_index: Index - ) -> Optional[str]: - conn_unique = bool(reflected_index.unique) - meta_unique = bool(metadata_index.unique) - if conn_unique != meta_unique: - return f"unique={conn_unique} to unique={meta_unique}" - else: - return None - - def _create_metadata_constraint_sig( - self, constraint: _autogen._C, **opts: Any - ) -> _constraint_sig[_autogen._C]: - return _constraint_sig.from_constraint(True, self, constraint, **opts) - - def _create_reflected_constraint_sig( - self, constraint: _autogen._C, **opts: Any - ) -> _constraint_sig[_autogen._C]: - return _constraint_sig.from_constraint(False, self, constraint, **opts) - - def compare_indexes( - self, - metadata_index: Index, - reflected_index: Index, - ) -> ComparisonResult: - """Compare two indexes by comparing the signature generated by - ``create_index_sig``. - - This method returns a ``ComparisonResult``. - """ - msg: List[str] = [] - unique_msg = self._compare_index_unique( - metadata_index, reflected_index - ) - if unique_msg: - msg.append(unique_msg) - m_sig = self._create_metadata_constraint_sig(metadata_index) - r_sig = self._create_reflected_constraint_sig(reflected_index) - - assert _autogen.is_index_sig(m_sig) - assert _autogen.is_index_sig(r_sig) - - # The assumption is that the index have no expression - for sig in m_sig, r_sig: - if sig.has_expressions: - log.warning( - "Generating approximate signature for index %s. " - "The dialect " - "implementation should either skip expression indexes " - "or provide a custom implementation.", - sig.const, - ) - - if m_sig.column_names != r_sig.column_names: - msg.append( - f"expression {r_sig.column_names} to {m_sig.column_names}" - ) - - if msg: - return ComparisonResult.Different(msg) - else: - return ComparisonResult.Equal() - - def compare_unique_constraint( - self, - metadata_constraint: UniqueConstraint, - reflected_constraint: UniqueConstraint, - ) -> ComparisonResult: - """Compare two unique constraints by comparing the two signatures. - - The arguments are two tuples that contain the unique constraint and - the signatures generated by ``create_unique_constraint_sig``. - - This method returns a ``ComparisonResult``. 
- """ - metadata_tup = self._create_metadata_constraint_sig( - metadata_constraint - ) - reflected_tup = self._create_reflected_constraint_sig( - reflected_constraint - ) - - meta_sig = metadata_tup.unnamed - conn_sig = reflected_tup.unnamed - if conn_sig != meta_sig: - return ComparisonResult.Different( - f"expression {conn_sig} to {meta_sig}" - ) - else: - return ComparisonResult.Equal() - - def _skip_functional_indexes(self, metadata_indexes, conn_indexes): - conn_indexes_by_name = {c.name: c for c in conn_indexes} - - for idx in list(metadata_indexes): - if idx.name in conn_indexes_by_name: - continue - iex = sqla_compat.is_expression_index(idx) - if iex: - util.warn( - "autogenerate skipping metadata-specified " - "expression-based index " - f"{idx.name!r}; dialect {self.__dialect__!r} under " - f"SQLAlchemy {sqla_compat.sqlalchemy_version} can't " - "reflect these indexes so they can't be compared" - ) - metadata_indexes.discard(idx) - - def adjust_reflected_dialect_options( - self, reflected_object: Dict[str, Any], kind: str - ) -> Dict[str, Any]: - return reflected_object.get("dialect_options", {}) - - -class Params(NamedTuple): - token0: str - tokens: List[str] - args: List[str] - kwargs: Dict[str, str] - - -def _compare_identity_options( - metadata_io: Union[schema.Identity, schema.Sequence, None], - inspector_io: Union[schema.Identity, schema.Sequence, None], - default_io: Union[schema.Identity, schema.Sequence], - skip: Set[str], -): - # this can be used for identity or sequence compare. - # default_io is an instance of IdentityOption with all attributes to the - # default value. - meta_d = sqla_compat._get_identity_options_dict(metadata_io) - insp_d = sqla_compat._get_identity_options_dict(inspector_io) - - diff = set() - ignored_attr = set() - - def check_dicts( - meta_dict: Mapping[str, Any], - insp_dict: Mapping[str, Any], - default_dict: Mapping[str, Any], - attrs: Iterable[str], - ): - for attr in set(attrs).difference(skip): - meta_value = meta_dict.get(attr) - insp_value = insp_dict.get(attr) - if insp_value != meta_value: - default_value = default_dict.get(attr) - if meta_value == default_value: - ignored_attr.add(attr) - else: - diff.add(attr) - - check_dicts( - meta_d, - insp_d, - sqla_compat._get_identity_options_dict(default_io), - set(meta_d).union(insp_d), - ) - if sqla_compat.identity_has_dialect_kwargs: - # use only the dialect kwargs in inspector_io since metadata_io - # can have options for many backends - check_dicts( - getattr(metadata_io, "dialect_kwargs", {}), - getattr(inspector_io, "dialect_kwargs", {}), - default_io.dialect_kwargs, # type: ignore[union-attr] - getattr(inspector_io, "dialect_kwargs", {}), - ) - - return diff, ignored_attr diff --git a/myenv/Lib/site-packages/alembic/ddl/mssql.py b/myenv/Lib/site-packages/alembic/ddl/mssql.py deleted file mode 100644 index baa43d5..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/mssql.py +++ /dev/null @@ -1,419 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import re -from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import types as sqltypes -from sqlalchemy.schema import Column -from sqlalchemy.schema import CreateIndex -from sqlalchemy.sql.base import Executable -from sqlalchemy.sql.elements import ClauseElement - -from .base import AddColumn -from 
.base import alter_column -from .base import alter_table -from .base import ColumnDefault -from .base import ColumnName -from .base import ColumnNullable -from .base import ColumnType -from .base import format_column_name -from .base import format_server_default -from .base import format_table_name -from .base import format_type -from .base import RenameTable -from .impl import DefaultImpl -from .. import util -from ..util import sqla_compat -from ..util.sqla_compat import compiles - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy.dialects.mssql.base import MSDDLCompiler - from sqlalchemy.dialects.mssql.base import MSSQLCompiler - from sqlalchemy.engine.cursor import CursorResult - from sqlalchemy.sql.schema import Index - from sqlalchemy.sql.schema import Table - from sqlalchemy.sql.selectable import TableClause - from sqlalchemy.sql.type_api import TypeEngine - - from .base import _ServerDefault - - -class MSSQLImpl(DefaultImpl): - __dialect__ = "mssql" - transactional_ddl = True - batch_separator = "GO" - - type_synonyms = DefaultImpl.type_synonyms + ({"VARCHAR", "NVARCHAR"},) - identity_attrs_ignore = DefaultImpl.identity_attrs_ignore + ( - "minvalue", - "maxvalue", - "nominvalue", - "nomaxvalue", - "cycle", - "cache", - ) - - def __init__(self, *arg, **kw) -> None: - super().__init__(*arg, **kw) - self.batch_separator = self.context_opts.get( - "mssql_batch_separator", self.batch_separator - ) - - def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]: - result = super()._exec(construct, *args, **kw) - if self.as_sql and self.batch_separator: - self.static_output(self.batch_separator) - return result - - def emit_begin(self) -> None: - self.static_output("BEGIN TRANSACTION" + self.command_terminator) - - def emit_commit(self) -> None: - super().emit_commit() - if self.as_sql and self.batch_separator: - self.static_output(self.batch_separator) - - def alter_column( # type:ignore[override] - self, - table_name: str, - column_name: str, - nullable: Optional[bool] = None, - server_default: Optional[ - Union[_ServerDefault, Literal[False]] - ] = False, - name: Optional[str] = None, - type_: Optional[TypeEngine] = None, - schema: Optional[str] = None, - existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, - existing_nullable: Optional[bool] = None, - **kw: Any, - ) -> None: - if nullable is not None: - if type_ is not None: - # the NULL/NOT NULL alter will handle - # the type alteration - existing_type = type_ - type_ = None - elif existing_type is None: - raise util.CommandError( - "MS-SQL ALTER COLUMN operations " - "with NULL or NOT NULL require the " - "existing_type or a new type_ be passed." - ) - elif existing_nullable is not None and type_ is not None: - nullable = existing_nullable - - # the NULL/NOT NULL alter will handle - # the type alteration - existing_type = type_ - type_ = None - - elif type_ is not None: - util.warn( - "MS-SQL ALTER COLUMN operations that specify type_= " - "should also specify a nullable= or " - "existing_nullable= argument to avoid implicit conversion " - "of NOT NULL columns to NULL." 
- ) - - used_default = False - if sqla_compat._server_default_is_identity( - server_default, existing_server_default - ) or sqla_compat._server_default_is_computed( - server_default, existing_server_default - ): - used_default = True - kw["server_default"] = server_default - kw["existing_server_default"] = existing_server_default - - super().alter_column( - table_name, - column_name, - nullable=nullable, - type_=type_, - schema=schema, - existing_type=existing_type, - existing_nullable=existing_nullable, - **kw, - ) - - if server_default is not False and used_default is False: - if existing_server_default is not False or server_default is None: - self._exec( - _ExecDropConstraint( - table_name, - column_name, - "sys.default_constraints", - schema, - ) - ) - if server_default is not None: - super().alter_column( - table_name, - column_name, - schema=schema, - server_default=server_default, - ) - - if name is not None: - super().alter_column( - table_name, column_name, schema=schema, name=name - ) - - def create_index(self, index: Index, **kw: Any) -> None: - # this likely defaults to None if not present, so get() - # should normally not return the default value. being - # defensive in any case - mssql_include = index.kwargs.get("mssql_include", None) or () - assert index.table is not None - for col in mssql_include: - if col not in index.table.c: - index.table.append_column(Column(col, sqltypes.NullType)) - self._exec(CreateIndex(index, **kw)) - - def bulk_insert( # type:ignore[override] - self, table: Union[TableClause, Table], rows: List[dict], **kw: Any - ) -> None: - if self.as_sql: - self._exec( - "SET IDENTITY_INSERT %s ON" - % self.dialect.identifier_preparer.format_table(table) - ) - super().bulk_insert(table, rows, **kw) - self._exec( - "SET IDENTITY_INSERT %s OFF" - % self.dialect.identifier_preparer.format_table(table) - ) - else: - super().bulk_insert(table, rows, **kw) - - def drop_column( - self, - table_name: str, - column: Column[Any], - schema: Optional[str] = None, - **kw, - ) -> None: - drop_default = kw.pop("mssql_drop_default", False) - if drop_default: - self._exec( - _ExecDropConstraint( - table_name, column, "sys.default_constraints", schema - ) - ) - drop_check = kw.pop("mssql_drop_check", False) - if drop_check: - self._exec( - _ExecDropConstraint( - table_name, column, "sys.check_constraints", schema - ) - ) - drop_fks = kw.pop("mssql_drop_foreign_key", False) - if drop_fks: - self._exec(_ExecDropFKConstraint(table_name, column, schema)) - super().drop_column(table_name, column, schema=schema, **kw) - - def compare_server_default( - self, - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_inspector_default, - ): - if rendered_metadata_default is not None: - rendered_metadata_default = re.sub( - r"[\(\) \"\']", "", rendered_metadata_default - ) - - if rendered_inspector_default is not None: - # SQL Server collapses whitespace and adds arbitrary parenthesis - # within expressions. 
our only option is collapse all of it - - rendered_inspector_default = re.sub( - r"[\(\) \"\']", "", rendered_inspector_default - ) - - return rendered_inspector_default != rendered_metadata_default - - def _compare_identity_default(self, metadata_identity, inspector_identity): - diff, ignored, is_alter = super()._compare_identity_default( - metadata_identity, inspector_identity - ) - - if ( - metadata_identity is None - and inspector_identity is not None - and not diff - and inspector_identity.column is not None - and inspector_identity.column.primary_key - ): - # mssql reflect primary keys with autoincrement as identity - # columns. if no different attributes are present ignore them - is_alter = False - - return diff, ignored, is_alter - - def adjust_reflected_dialect_options( - self, reflected_object: Dict[str, Any], kind: str - ) -> Dict[str, Any]: - options: Dict[str, Any] - options = reflected_object.get("dialect_options", {}).copy() - if not options.get("mssql_include"): - options.pop("mssql_include", None) - if not options.get("mssql_clustered"): - options.pop("mssql_clustered", None) - return options - - -class _ExecDropConstraint(Executable, ClauseElement): - inherit_cache = False - - def __init__( - self, - tname: str, - colname: Union[Column[Any], str], - type_: str, - schema: Optional[str], - ) -> None: - self.tname = tname - self.colname = colname - self.type_ = type_ - self.schema = schema - - -class _ExecDropFKConstraint(Executable, ClauseElement): - inherit_cache = False - - def __init__( - self, tname: str, colname: Column[Any], schema: Optional[str] - ) -> None: - self.tname = tname - self.colname = colname - self.schema = schema - - -@compiles(_ExecDropConstraint, "mssql") -def _exec_drop_col_constraint( - element: _ExecDropConstraint, compiler: MSSQLCompiler, **kw -) -> str: - schema, tname, colname, type_ = ( - element.schema, - element.tname, - element.colname, - element.type_, - ) - # from http://www.mssqltips.com/sqlservertip/1425/\ - # working-with-default-constraints-in-sql-server/ - return """declare @const_name varchar(256) -select @const_name = QUOTENAME([name]) from %(type)s -where parent_object_id = object_id('%(schema_dot)s%(tname)s') -and col_name(parent_object_id, parent_column_id) = '%(colname)s' -exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { - "type": type_, - "tname": tname, - "colname": colname, - "tname_quoted": format_table_name(compiler, tname, schema), - "schema_dot": schema + "." if schema else "", - } - - -@compiles(_ExecDropFKConstraint, "mssql") -def _exec_drop_col_fk_constraint( - element: _ExecDropFKConstraint, compiler: MSSQLCompiler, **kw -) -> str: - schema, tname, colname = element.schema, element.tname, element.colname - - return """declare @const_name varchar(256) -select @const_name = QUOTENAME([name]) from -sys.foreign_keys fk join sys.foreign_key_columns fkc -on fk.object_id=fkc.constraint_object_id -where fkc.parent_object_id = object_id('%(schema_dot)s%(tname)s') -and col_name(fkc.parent_object_id, fkc.parent_column_id) = '%(colname)s' -exec('alter table %(tname_quoted)s drop constraint ' + @const_name)""" % { - "tname": tname, - "colname": colname, - "tname_quoted": format_table_name(compiler, tname, schema), - "schema_dot": schema + "." 
if schema else "", - } - - -@compiles(AddColumn, "mssql") -def visit_add_column(element: AddColumn, compiler: MSDDLCompiler, **kw) -> str: - return "%s %s" % ( - alter_table(compiler, element.table_name, element.schema), - mssql_add_column(compiler, element.column, **kw), - ) - - -def mssql_add_column( - compiler: MSDDLCompiler, column: Column[Any], **kw -) -> str: - return "ADD %s" % compiler.get_column_specification(column, **kw) - - -@compiles(ColumnNullable, "mssql") -def visit_column_nullable( - element: ColumnNullable, compiler: MSDDLCompiler, **kw -) -> str: - return "%s %s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - format_type(compiler, element.existing_type), # type: ignore[arg-type] - "NULL" if element.nullable else "NOT NULL", - ) - - -@compiles(ColumnDefault, "mssql") -def visit_column_default( - element: ColumnDefault, compiler: MSDDLCompiler, **kw -) -> str: - # TODO: there can also be a named constraint - # with ADD CONSTRAINT here - return "%s ADD DEFAULT %s FOR %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_server_default(compiler, element.default), - format_column_name(compiler, element.column_name), - ) - - -@compiles(ColumnName, "mssql") -def visit_rename_column( - element: ColumnName, compiler: MSDDLCompiler, **kw -) -> str: - return "EXEC sp_rename '%s.%s', %s, 'COLUMN'" % ( - format_table_name(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname), - ) - - -@compiles(ColumnType, "mssql") -def visit_column_type( - element: ColumnType, compiler: MSDDLCompiler, **kw -) -> str: - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - format_type(compiler, element.type_), - ) - - -@compiles(RenameTable, "mssql") -def visit_rename_table( - element: RenameTable, compiler: MSDDLCompiler, **kw -) -> str: - return "EXEC sp_rename '%s', %s" % ( - format_table_name(compiler, element.table_name, element.schema), - format_table_name(compiler, element.new_table_name, None), - ) diff --git a/myenv/Lib/site-packages/alembic/ddl/mysql.py b/myenv/Lib/site-packages/alembic/ddl/mysql.py deleted file mode 100644 index 3482f67..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/mysql.py +++ /dev/null @@ -1,492 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import re -from typing import Any -from typing import Optional -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import schema -from sqlalchemy import types as sqltypes - -from .base import alter_table -from .base import AlterColumn -from .base import ColumnDefault -from .base import ColumnName -from .base import ColumnNullable -from .base import ColumnType -from .base import format_column_name -from .base import format_server_default -from .impl import DefaultImpl -from .. 
import util -from ..util import sqla_compat -from ..util.sqla_compat import _is_mariadb -from ..util.sqla_compat import _is_type_bound -from ..util.sqla_compat import compiles - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy.dialects.mysql.base import MySQLDDLCompiler - from sqlalchemy.sql.ddl import DropConstraint - from sqlalchemy.sql.schema import Constraint - from sqlalchemy.sql.type_api import TypeEngine - - from .base import _ServerDefault - - -class MySQLImpl(DefaultImpl): - __dialect__ = "mysql" - - transactional_ddl = False - type_synonyms = DefaultImpl.type_synonyms + ( - {"BOOL", "TINYINT"}, - {"JSON", "LONGTEXT"}, - ) - type_arg_extract = [r"character set ([\w\-_]+)", r"collate ([\w\-_]+)"] - - def alter_column( # type:ignore[override] - self, - table_name: str, - column_name: str, - nullable: Optional[bool] = None, - server_default: Union[_ServerDefault, Literal[False]] = False, - name: Optional[str] = None, - type_: Optional[TypeEngine] = None, - schema: Optional[str] = None, - existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, - existing_nullable: Optional[bool] = None, - autoincrement: Optional[bool] = None, - existing_autoincrement: Optional[bool] = None, - comment: Optional[Union[str, Literal[False]]] = False, - existing_comment: Optional[str] = None, - **kw: Any, - ) -> None: - if sqla_compat._server_default_is_identity( - server_default, existing_server_default - ) or sqla_compat._server_default_is_computed( - server_default, existing_server_default - ): - # modifying computed or identity columns is not supported - # the default will raise - super().alter_column( - table_name, - column_name, - nullable=nullable, - type_=type_, - schema=schema, - existing_type=existing_type, - existing_nullable=existing_nullable, - server_default=server_default, - existing_server_default=existing_server_default, - **kw, - ) - if name is not None or self._is_mysql_allowed_functional_default( - type_ if type_ is not None else existing_type, server_default - ): - self._exec( - MySQLChangeColumn( - table_name, - column_name, - schema=schema, - newname=name if name is not None else column_name, - nullable=( - nullable - if nullable is not None - else ( - existing_nullable - if existing_nullable is not None - else True - ) - ), - type_=type_ if type_ is not None else existing_type, - default=( - server_default - if server_default is not False - else existing_server_default - ), - autoincrement=( - autoincrement - if autoincrement is not None - else existing_autoincrement - ), - comment=( - comment if comment is not False else existing_comment - ), - ) - ) - elif ( - nullable is not None - or type_ is not None - or autoincrement is not None - or comment is not False - ): - self._exec( - MySQLModifyColumn( - table_name, - column_name, - schema=schema, - newname=name if name is not None else column_name, - nullable=( - nullable - if nullable is not None - else ( - existing_nullable - if existing_nullable is not None - else True - ) - ), - type_=type_ if type_ is not None else existing_type, - default=( - server_default - if server_default is not False - else existing_server_default - ), - autoincrement=( - autoincrement - if autoincrement is not None - else existing_autoincrement - ), - comment=( - comment if comment is not False else existing_comment - ), - ) - ) - elif server_default is not False: - self._exec( - MySQLAlterDefault( - table_name, column_name, server_default, schema=schema - ) - ) - - def drop_constraint( - 
self, - const: Constraint, - ) -> None: - if isinstance(const, schema.CheckConstraint) and _is_type_bound(const): - return - - super().drop_constraint(const) - - def _is_mysql_allowed_functional_default( - self, - type_: Optional[TypeEngine], - server_default: Union[_ServerDefault, Literal[False]], - ) -> bool: - return ( - type_ is not None - and type_._type_affinity is sqltypes.DateTime - and server_default is not None - ) - - def compare_server_default( - self, - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_inspector_default, - ): - # partially a workaround for SQLAlchemy issue #3023; if the - # column were created without "NOT NULL", MySQL may have added - # an implicit default of '0' which we need to skip - # TODO: this is not really covered anymore ? - if ( - metadata_column.type._type_affinity is sqltypes.Integer - and inspector_column.primary_key - and not inspector_column.autoincrement - and not rendered_metadata_default - and rendered_inspector_default == "'0'" - ): - return False - elif ( - rendered_inspector_default - and inspector_column.type._type_affinity is sqltypes.Integer - ): - rendered_inspector_default = ( - re.sub(r"^'|'$", "", rendered_inspector_default) - if rendered_inspector_default is not None - else None - ) - return rendered_inspector_default != rendered_metadata_default - elif ( - rendered_metadata_default - and metadata_column.type._type_affinity is sqltypes.String - ): - metadata_default = re.sub(r"^'|'$", "", rendered_metadata_default) - return rendered_inspector_default != f"'{metadata_default}'" - elif rendered_inspector_default and rendered_metadata_default: - # adjust for "function()" vs. "FUNCTION" as can occur particularly - # for the CURRENT_TIMESTAMP function on newer MariaDB versions - - # SQLAlchemy MySQL dialect bundles ON UPDATE into the server - # default; adjust for this possibly being present. - onupdate_ins = re.match( - r"(.*) (on update.*?)(?:\(\))?$", - rendered_inspector_default.lower(), - ) - onupdate_met = re.match( - r"(.*) (on update.*?)(?:\(\))?$", - rendered_metadata_default.lower(), - ) - - if onupdate_ins: - if not onupdate_met: - return True - elif onupdate_ins.group(2) != onupdate_met.group(2): - return True - - rendered_inspector_default = onupdate_ins.group(1) - rendered_metadata_default = onupdate_met.group(1) - - return re.sub( - r"(.*?)(?:\(\))?$", r"\1", rendered_inspector_default.lower() - ) != re.sub( - r"(.*?)(?:\(\))?$", r"\1", rendered_metadata_default.lower() - ) - else: - return rendered_inspector_default != rendered_metadata_default - - def correct_for_autogen_constraints( - self, - conn_unique_constraints, - conn_indexes, - metadata_unique_constraints, - metadata_indexes, - ): - # TODO: if SQLA 1.0, make use of "duplicates_index" - # metadata - removed = set() - for idx in list(conn_indexes): - if idx.unique: - continue - # MySQL puts implicit indexes on FK columns, even if - # composite and even if MyISAM, so can't check this too easily. - # the name of the index may be the column name or it may - # be the name of the FK constraint. 
- for col in idx.columns: - if idx.name == col.name: - conn_indexes.remove(idx) - removed.add(idx.name) - break - for fk in col.foreign_keys: - if fk.name == idx.name: - conn_indexes.remove(idx) - removed.add(idx.name) - break - if idx.name in removed: - break - - # then remove indexes from the "metadata_indexes" - # that we've removed from reflected, otherwise they come out - # as adds (see #202) - for idx in list(metadata_indexes): - if idx.name in removed: - metadata_indexes.remove(idx) - - def correct_for_autogen_foreignkeys(self, conn_fks, metadata_fks): - conn_fk_by_sig = { - self._create_reflected_constraint_sig(fk).unnamed_no_options: fk - for fk in conn_fks - } - metadata_fk_by_sig = { - self._create_metadata_constraint_sig(fk).unnamed_no_options: fk - for fk in metadata_fks - } - - for sig in set(conn_fk_by_sig).intersection(metadata_fk_by_sig): - mdfk = metadata_fk_by_sig[sig] - cnfk = conn_fk_by_sig[sig] - # MySQL considers RESTRICT to be the default and doesn't - # report on it. if the model has explicit RESTRICT and - # the conn FK has None, set it to RESTRICT - if ( - mdfk.ondelete is not None - and mdfk.ondelete.lower() == "restrict" - and cnfk.ondelete is None - ): - cnfk.ondelete = "RESTRICT" - if ( - mdfk.onupdate is not None - and mdfk.onupdate.lower() == "restrict" - and cnfk.onupdate is None - ): - cnfk.onupdate = "RESTRICT" - - -class MariaDBImpl(MySQLImpl): - __dialect__ = "mariadb" - - -class MySQLAlterDefault(AlterColumn): - def __init__( - self, - name: str, - column_name: str, - default: _ServerDefault, - schema: Optional[str] = None, - ) -> None: - super(AlterColumn, self).__init__(name, schema=schema) - self.column_name = column_name - self.default = default - - -class MySQLChangeColumn(AlterColumn): - def __init__( - self, - name: str, - column_name: str, - schema: Optional[str] = None, - newname: Optional[str] = None, - type_: Optional[TypeEngine] = None, - nullable: Optional[bool] = None, - default: Optional[Union[_ServerDefault, Literal[False]]] = False, - autoincrement: Optional[bool] = None, - comment: Optional[Union[str, Literal[False]]] = False, - ) -> None: - super(AlterColumn, self).__init__(name, schema=schema) - self.column_name = column_name - self.nullable = nullable - self.newname = newname - self.default = default - self.autoincrement = autoincrement - self.comment = comment - if type_ is None: - raise util.CommandError( - "All MySQL CHANGE/MODIFY COLUMN operations " - "require the existing type." 
- ) - - self.type_ = sqltypes.to_instance(type_) - - -class MySQLModifyColumn(MySQLChangeColumn): - pass - - -@compiles(ColumnNullable, "mysql", "mariadb") -@compiles(ColumnName, "mysql", "mariadb") -@compiles(ColumnDefault, "mysql", "mariadb") -@compiles(ColumnType, "mysql", "mariadb") -def _mysql_doesnt_support_individual(element, compiler, **kw): - raise NotImplementedError( - "Individual alter column constructs not supported by MySQL" - ) - - -@compiles(MySQLAlterDefault, "mysql", "mariadb") -def _mysql_alter_default( - element: MySQLAlterDefault, compiler: MySQLDDLCompiler, **kw -) -> str: - return "%s ALTER COLUMN %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - ( - "SET DEFAULT %s" % format_server_default(compiler, element.default) - if element.default is not None - else "DROP DEFAULT" - ), - ) - - -@compiles(MySQLModifyColumn, "mysql", "mariadb") -def _mysql_modify_column( - element: MySQLModifyColumn, compiler: MySQLDDLCompiler, **kw -) -> str: - return "%s MODIFY %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - _mysql_colspec( - compiler, - nullable=element.nullable, - server_default=element.default, - type_=element.type_, - autoincrement=element.autoincrement, - comment=element.comment, - ), - ) - - -@compiles(MySQLChangeColumn, "mysql", "mariadb") -def _mysql_change_column( - element: MySQLChangeColumn, compiler: MySQLDDLCompiler, **kw -) -> str: - return "%s CHANGE %s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname), - _mysql_colspec( - compiler, - nullable=element.nullable, - server_default=element.default, - type_=element.type_, - autoincrement=element.autoincrement, - comment=element.comment, - ), - ) - - -def _mysql_colspec( - compiler: MySQLDDLCompiler, - nullable: Optional[bool], - server_default: Optional[Union[_ServerDefault, Literal[False]]], - type_: TypeEngine, - autoincrement: Optional[bool], - comment: Optional[Union[str, Literal[False]]], -) -> str: - spec = "%s %s" % ( - compiler.dialect.type_compiler.process(type_), - "NULL" if nullable else "NOT NULL", - ) - if autoincrement: - spec += " AUTO_INCREMENT" - if server_default is not False and server_default is not None: - spec += " DEFAULT %s" % format_server_default(compiler, server_default) - if comment: - spec += " COMMENT %s" % compiler.sql_compiler.render_literal_value( - comment, sqltypes.String() - ) - - return spec - - -@compiles(schema.DropConstraint, "mysql", "mariadb") -def _mysql_drop_constraint( - element: DropConstraint, compiler: MySQLDDLCompiler, **kw -) -> str: - """Redefine SQLAlchemy's drop constraint to - raise errors for invalid constraint type.""" - - constraint = element.element - if isinstance( - constraint, - ( - schema.ForeignKeyConstraint, - schema.PrimaryKeyConstraint, - schema.UniqueConstraint, - ), - ): - assert not kw - return compiler.visit_drop_constraint(element) - elif isinstance(constraint, schema.CheckConstraint): - # note that SQLAlchemy as of 1.2 does not yet support - # DROP CONSTRAINT for MySQL/MariaDB, so we implement fully - # here. 
- if _is_mariadb(compiler.dialect): - return "ALTER TABLE %s DROP CONSTRAINT %s" % ( - compiler.preparer.format_table(constraint.table), - compiler.preparer.format_constraint(constraint), - ) - else: - return "ALTER TABLE %s DROP CHECK %s" % ( - compiler.preparer.format_table(constraint.table), - compiler.preparer.format_constraint(constraint), - ) - else: - raise NotImplementedError( - "No generic 'DROP CONSTRAINT' in MySQL - " - "please specify constraint type" - ) diff --git a/myenv/Lib/site-packages/alembic/ddl/oracle.py b/myenv/Lib/site-packages/alembic/ddl/oracle.py deleted file mode 100644 index eac9912..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/oracle.py +++ /dev/null @@ -1,202 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import re -from typing import Any -from typing import Optional -from typing import TYPE_CHECKING - -from sqlalchemy.sql import sqltypes - -from .base import AddColumn -from .base import alter_table -from .base import ColumnComment -from .base import ColumnDefault -from .base import ColumnName -from .base import ColumnNullable -from .base import ColumnType -from .base import format_column_name -from .base import format_server_default -from .base import format_table_name -from .base import format_type -from .base import IdentityColumnDefault -from .base import RenameTable -from .impl import DefaultImpl -from ..util.sqla_compat import compiles - -if TYPE_CHECKING: - from sqlalchemy.dialects.oracle.base import OracleDDLCompiler - from sqlalchemy.engine.cursor import CursorResult - from sqlalchemy.sql.schema import Column - - -class OracleImpl(DefaultImpl): - __dialect__ = "oracle" - transactional_ddl = False - batch_separator = "/" - command_terminator = "" - type_synonyms = DefaultImpl.type_synonyms + ( - {"VARCHAR", "VARCHAR2"}, - {"BIGINT", "INTEGER", "SMALLINT", "DECIMAL", "NUMERIC", "NUMBER"}, - {"DOUBLE", "FLOAT", "DOUBLE_PRECISION"}, - ) - identity_attrs_ignore = () - - def __init__(self, *arg, **kw) -> None: - super().__init__(*arg, **kw) - self.batch_separator = self.context_opts.get( - "oracle_batch_separator", self.batch_separator - ) - - def _exec(self, construct: Any, *args, **kw) -> Optional[CursorResult]: - result = super()._exec(construct, *args, **kw) - if self.as_sql and self.batch_separator: - self.static_output(self.batch_separator) - return result - - def compare_server_default( - self, - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_inspector_default, - ): - if rendered_metadata_default is not None: - rendered_metadata_default = re.sub( - r"^\((.+)\)$", r"\1", rendered_metadata_default - ) - - rendered_metadata_default = re.sub( - r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default - ) - - if rendered_inspector_default is not None: - rendered_inspector_default = re.sub( - r"^\((.+)\)$", r"\1", rendered_inspector_default - ) - - rendered_inspector_default = re.sub( - r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default - ) - - rendered_inspector_default = rendered_inspector_default.strip() - return rendered_inspector_default != rendered_metadata_default - - def emit_begin(self) -> None: - self._exec("SET TRANSACTION READ WRITE") - - def emit_commit(self) -> None: - self._exec("COMMIT") - - -@compiles(AddColumn, "oracle") -def visit_add_column( - element: AddColumn, compiler: OracleDDLCompiler, **kw -) -> str: - return "%s %s" % ( - alter_table(compiler, element.table_name, 
element.schema), - add_column(compiler, element.column, **kw), - ) - - -@compiles(ColumnNullable, "oracle") -def visit_column_nullable( - element: ColumnNullable, compiler: OracleDDLCompiler, **kw -) -> str: - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "NULL" if element.nullable else "NOT NULL", - ) - - -@compiles(ColumnType, "oracle") -def visit_column_type( - element: ColumnType, compiler: OracleDDLCompiler, **kw -) -> str: - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "%s" % format_type(compiler, element.type_), - ) - - -@compiles(ColumnName, "oracle") -def visit_column_name( - element: ColumnName, compiler: OracleDDLCompiler, **kw -) -> str: - return "%s RENAME COLUMN %s TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_column_name(compiler, element.column_name), - format_column_name(compiler, element.newname), - ) - - -@compiles(ColumnDefault, "oracle") -def visit_column_default( - element: ColumnDefault, compiler: OracleDDLCompiler, **kw -) -> str: - return "%s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - ( - "DEFAULT %s" % format_server_default(compiler, element.default) - if element.default is not None - else "DEFAULT NULL" - ), - ) - - -@compiles(ColumnComment, "oracle") -def visit_column_comment( - element: ColumnComment, compiler: OracleDDLCompiler, **kw -) -> str: - ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}" - - comment = compiler.sql_compiler.render_literal_value( - (element.comment if element.comment is not None else ""), - sqltypes.String(), - ) - - return ddl.format( - table_name=element.table_name, - column_name=element.column_name, - comment=comment, - ) - - -@compiles(RenameTable, "oracle") -def visit_rename_table( - element: RenameTable, compiler: OracleDDLCompiler, **kw -) -> str: - return "%s RENAME TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_table_name(compiler, element.new_table_name, None), - ) - - -def alter_column(compiler: OracleDDLCompiler, name: str) -> str: - return "MODIFY %s" % format_column_name(compiler, name) - - -def add_column(compiler: OracleDDLCompiler, column: Column[Any], **kw) -> str: - return "ADD %s" % compiler.get_column_specification(column, **kw) - - -@compiles(IdentityColumnDefault, "oracle") -def visit_identity_column( - element: IdentityColumnDefault, compiler: OracleDDLCompiler, **kw -): - text = "%s %s " % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - ) - if element.default is None: - # drop identity - text += "DROP IDENTITY" - return text - else: - text += compiler.visit_identity_column(element.default) - return text diff --git a/myenv/Lib/site-packages/alembic/ddl/postgresql.py b/myenv/Lib/site-packages/alembic/ddl/postgresql.py deleted file mode 100644 index de64a4e..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/postgresql.py +++ /dev/null @@ -1,849 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import logging -import re -from typing import Any -from typing import cast -from typing import Dict -from typing import List -from typing import Optional -from typing import Sequence -from typing import Tuple -from 
typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import Column -from sqlalchemy import literal_column -from sqlalchemy import Numeric -from sqlalchemy import text -from sqlalchemy import types as sqltypes -from sqlalchemy.dialects.postgresql import BIGINT -from sqlalchemy.dialects.postgresql import ExcludeConstraint -from sqlalchemy.dialects.postgresql import INTEGER -from sqlalchemy.schema import CreateIndex -from sqlalchemy.sql.elements import ColumnClause -from sqlalchemy.sql.elements import TextClause -from sqlalchemy.sql.functions import FunctionElement -from sqlalchemy.types import NULLTYPE - -from .base import alter_column -from .base import alter_table -from .base import AlterColumn -from .base import ColumnComment -from .base import format_column_name -from .base import format_table_name -from .base import format_type -from .base import IdentityColumnDefault -from .base import RenameTable -from .impl import ComparisonResult -from .impl import DefaultImpl -from .. import util -from ..autogenerate import render -from ..operations import ops -from ..operations import schemaobj -from ..operations.base import BatchOperations -from ..operations.base import Operations -from ..util import sqla_compat -from ..util.sqla_compat import compiles - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy import Index - from sqlalchemy import UniqueConstraint - from sqlalchemy.dialects.postgresql.array import ARRAY - from sqlalchemy.dialects.postgresql.base import PGDDLCompiler - from sqlalchemy.dialects.postgresql.hstore import HSTORE - from sqlalchemy.dialects.postgresql.json import JSON - from sqlalchemy.dialects.postgresql.json import JSONB - from sqlalchemy.sql.elements import ClauseElement - from sqlalchemy.sql.elements import ColumnElement - from sqlalchemy.sql.elements import quoted_name - from sqlalchemy.sql.schema import MetaData - from sqlalchemy.sql.schema import Table - from sqlalchemy.sql.type_api import TypeEngine - - from .base import _ServerDefault - from ..autogenerate.api import AutogenContext - from ..autogenerate.render import _f_name - from ..runtime.migration import MigrationContext - - -log = logging.getLogger(__name__) - - -class PostgresqlImpl(DefaultImpl): - __dialect__ = "postgresql" - transactional_ddl = True - type_synonyms = DefaultImpl.type_synonyms + ( - {"FLOAT", "DOUBLE PRECISION"}, - ) - - def create_index(self, index: Index, **kw: Any) -> None: - # this likely defaults to None if not present, so get() - # should normally not return the default value. 
being - # defensive in any case - postgresql_include = index.kwargs.get("postgresql_include", None) or () - for col in postgresql_include: - if col not in index.table.c: # type: ignore[union-attr] - index.table.append_column( # type: ignore[union-attr] - Column(col, sqltypes.NullType) - ) - self._exec(CreateIndex(index, **kw)) - - def prep_table_for_batch(self, batch_impl, table): - for constraint in table.constraints: - if ( - constraint.name is not None - and constraint.name in batch_impl.named_constraints - ): - self.drop_constraint(constraint) - - def compare_server_default( - self, - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_inspector_default, - ): - # don't do defaults for SERIAL columns - if ( - metadata_column.primary_key - and metadata_column is metadata_column.table._autoincrement_column - ): - return False - - conn_col_default = rendered_inspector_default - - defaults_equal = conn_col_default == rendered_metadata_default - if defaults_equal: - return False - - if None in ( - conn_col_default, - rendered_metadata_default, - metadata_column.server_default, - ): - return not defaults_equal - - metadata_default = metadata_column.server_default.arg - - if isinstance(metadata_default, str): - if not isinstance(inspector_column.type, Numeric): - metadata_default = re.sub(r"^'|'$", "", metadata_default) - metadata_default = f"'{metadata_default}'" - - metadata_default = literal_column(metadata_default) - - # run a real compare against the server - conn = self.connection - assert conn is not None - return not conn.scalar( - sqla_compat._select( - literal_column(conn_col_default) == metadata_default - ) - ) - - def alter_column( # type:ignore[override] - self, - table_name: str, - column_name: str, - nullable: Optional[bool] = None, - server_default: Union[_ServerDefault, Literal[False]] = False, - name: Optional[str] = None, - type_: Optional[TypeEngine] = None, - schema: Optional[str] = None, - autoincrement: Optional[bool] = None, - existing_type: Optional[TypeEngine] = None, - existing_server_default: Optional[_ServerDefault] = None, - existing_nullable: Optional[bool] = None, - existing_autoincrement: Optional[bool] = None, - **kw: Any, - ) -> None: - using = kw.pop("postgresql_using", None) - - if using is not None and type_ is None: - raise util.CommandError( - "postgresql_using must be used with the type_ parameter" - ) - - if type_ is not None: - self._exec( - PostgresqlColumnType( - table_name, - column_name, - type_, - schema=schema, - using=using, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - ) - ) - - super().alter_column( - table_name, - column_name, - nullable=nullable, - server_default=server_default, - name=name, - schema=schema, - autoincrement=autoincrement, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_autoincrement=existing_autoincrement, - **kw, - ) - - def autogen_column_reflect(self, inspector, table, column_info): - if column_info.get("default") and isinstance( - column_info["type"], (INTEGER, BIGINT) - ): - seq_match = re.match( - r"nextval\('(.+?)'::regclass\)", column_info["default"] - ) - if seq_match: - info = sqla_compat._exec_on_inspector( - inspector, - text( - "select c.relname, a.attname " - "from pg_class as c join " - "pg_depend d on d.objid=c.oid and " - "d.classid='pg_class'::regclass and " - "d.refclassid='pg_class'::regclass " - "join pg_class t on 
t.oid=d.refobjid " - "join pg_attribute a on a.attrelid=t.oid and " - "a.attnum=d.refobjsubid " - "where c.relkind='S' and " - "c.oid=cast(:seqname as regclass)" - ), - seqname=seq_match.group(1), - ).first() - if info: - seqname, colname = info - if colname == column_info["name"]: - log.info( - "Detected sequence named '%s' as " - "owned by integer column '%s(%s)', " - "assuming SERIAL and omitting", - seqname, - table.name, - colname, - ) - # sequence, and the owner is this column, - # it's a SERIAL - whack it! - del column_info["default"] - - def correct_for_autogen_constraints( - self, - conn_unique_constraints, - conn_indexes, - metadata_unique_constraints, - metadata_indexes, - ): - doubled_constraints = { - index - for index in conn_indexes - if index.info.get("duplicates_constraint") - } - - for ix in doubled_constraints: - conn_indexes.remove(ix) - - if not sqla_compat.sqla_2: - self._skip_functional_indexes(metadata_indexes, conn_indexes) - - # pg behavior regarding modifiers - # | # | compiled sql | returned sql | regexp. group is removed | - # | - | ---------------- | ---------------- | ------------------------ | - # | 1 | nulls first | nulls first | - | - # | 2 | nulls last | | (?<! desc)( nulls last)$ | - # | 3 | asc | | ( asc)$ | - # | 4 | asc nulls first | nulls first | ( asc) nulls first$ | - # | 5 | asc nulls last | | ( asc nulls last)$ | - # | 6 | desc | desc | - | - # | 7 | desc nulls first | desc | desc( nulls first)$ | - # | 8 | desc nulls last | desc nulls last | - | - _default_modifiers_re = ( - # order of case matters, since regexp removes "anchored" modifiers - re.compile("( asc nulls last)$"), # case 5 - re.compile("(?<! desc)( nulls last)$"), # case 2 - re.compile("( asc)$"), # case 3 - re.compile("( asc) nulls first$"), # case 4 - re.compile(" desc( nulls first)$"), # case 7 - ) - - def _cleanup_index_expr(self, index: Index, expr: str) -> str: - expr = expr.lower().replace('"', "").replace("'", "") - if index.table is not None: - # should not be needed, since include_table=False is in compile - expr = expr.replace(f"{index.table.name.lower()}.", "") - - if "::" in expr: - # strip :: cast. types can have spaces in them - expr = re.sub(r"(::[\w ]+\w)", "", expr) - - while expr and expr[0] == "(" and expr[-1] == ")": - expr = expr[1:-1] - - # NOTE: when parsing the connection expression this cleanup could - # be skipped - for rs in self._default_modifiers_re: - if match := rs.search(expr): - start, end = match.span(1) - expr = expr[:start] + expr[end:] - break - - while expr and expr[0] == "(" and expr[-1] == ")": - expr = expr[1:-1] - - # strip casts - cast_re = re.compile(r"cast\s*\(") - if cast_re.match(expr): - expr = cast_re.sub("", expr) - # remove the as type - expr = re.sub(r"as\s+[^)]+\)", "", expr) - # remove spaces - expr = expr.replace(" ", "") - return expr - - def _dialect_options( - self, item: Union[Index, UniqueConstraint] - ) -> Tuple[Any, ...]: - # only the positive case is returned by sqlalchemy reflection so - # None and False are treated the same - if item.dialect_kwargs.get("postgresql_nulls_not_distinct"): - return ("nulls_not_distinct",) - return () - - def compare_indexes( - self, - metadata_index: Index, - reflected_index: Index, - ) -> ComparisonResult: - msg = [] - unique_msg = self._compare_index_unique( - metadata_index, reflected_index - ) - if unique_msg: - msg.append(unique_msg) - m_exprs = metadata_index.expressions - r_exprs = reflected_index.expressions - if len(m_exprs) != len(r_exprs): - msg.append(f"expression number {len(r_exprs)} to {len(m_exprs)}") - if msg: - # no point going further, return early - return ComparisonResult.Different(msg) - skip = [] - for pos, (m_e, r_e) in enumerate(zip(m_exprs, r_exprs), 1): - m_compile = self._compile_element(m_e) - m_text = self._cleanup_index_expr(metadata_index, m_compile) - # print(f"META ORIG: {m_compile!r} CLEANUP: {m_text!r}") - r_compile = self._compile_element(r_e) - r_text = self._cleanup_index_expr(metadata_index, r_compile) - # print(f"CONN ORIG: {r_compile!r} CLEANUP: {r_text!r}") - if m_text == r_text: - continue # these expressions are equal - elif m_compile.strip().endswith("_ops") and ( - " " in m_compile or ")" in m_compile # is an
expression - ): - skip.append( - f"expression #{pos} {m_compile!r} detected " - "as including operator clause." - ) - util.warn( - f"Expression #{pos} {m_compile!r} in index " - f"{reflected_index.name!r} detected to include " - "an operator clause. Expression compare cannot proceed. " - "Please move the operator clause to the " - "``postgresql_ops`` dict to enable proper compare " - "of the index expressions: " - "https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html#operator-classes", # noqa: E501 - ) - else: - msg.append(f"expression #{pos} {r_compile!r} to {m_compile!r}") - - m_options = self._dialect_options(metadata_index) - r_options = self._dialect_options(reflected_index) - if m_options != r_options: - msg.extend(f"options {r_options} to {m_options}") - - if msg: - return ComparisonResult.Different(msg) - elif skip: - # if there are other changes detected don't skip the index - return ComparisonResult.Skip(skip) - else: - return ComparisonResult.Equal() - - def compare_unique_constraint( - self, - metadata_constraint: UniqueConstraint, - reflected_constraint: UniqueConstraint, - ) -> ComparisonResult: - metadata_tup = self._create_metadata_constraint_sig( - metadata_constraint - ) - reflected_tup = self._create_reflected_constraint_sig( - reflected_constraint - ) - - meta_sig = metadata_tup.unnamed - conn_sig = reflected_tup.unnamed - if conn_sig != meta_sig: - return ComparisonResult.Different( - f"expression {conn_sig} to {meta_sig}" - ) - - metadata_do = self._dialect_options(metadata_tup.const) - conn_do = self._dialect_options(reflected_tup.const) - if metadata_do != conn_do: - return ComparisonResult.Different( - f"expression {conn_do} to {metadata_do}" - ) - - return ComparisonResult.Equal() - - def adjust_reflected_dialect_options( - self, reflected_options: Dict[str, Any], kind: str - ) -> Dict[str, Any]: - options: Dict[str, Any] - options = reflected_options.get("dialect_options", {}).copy() - if not options.get("postgresql_include"): - options.pop("postgresql_include", None) - return options - - def _compile_element(self, element: Union[ClauseElement, str]) -> str: - if isinstance(element, str): - return element - return element.compile( - dialect=self.dialect, - compile_kwargs={"literal_binds": True, "include_table": False}, - ).string - - def render_ddl_sql_expr( - self, - expr: ClauseElement, - is_server_default: bool = False, - is_index: bool = False, - **kw: Any, - ) -> str: - """Render a SQL expression that is typically a server default, - index expression, etc. 
- - """ - - # apply self_group to index expressions; - # see https://github.com/sqlalchemy/sqlalchemy/blob/ - # 82fa95cfce070fab401d020c6e6e4a6a96cc2578/ - # lib/sqlalchemy/dialects/postgresql/base.py#L2261 - if is_index and not isinstance(expr, ColumnClause): - expr = expr.self_group() - - return super().render_ddl_sql_expr( - expr, is_server_default=is_server_default, is_index=is_index, **kw - ) - - def render_type( - self, type_: TypeEngine, autogen_context: AutogenContext - ) -> Union[str, Literal[False]]: - mod = type(type_).__module__ - if not mod.startswith("sqlalchemy.dialects.postgresql"): - return False - - if hasattr(self, "_render_%s_type" % type_.__visit_name__): - meth = getattr(self, "_render_%s_type" % type_.__visit_name__) - return meth(type_, autogen_context) - - return False - - def _render_HSTORE_type( - self, type_: HSTORE, autogen_context: AutogenContext - ) -> str: - return cast( - str, - render._render_type_w_subtype( - type_, autogen_context, "text_type", r"(.+?\(.*text_type=)" - ), - ) - - def _render_ARRAY_type( - self, type_: ARRAY, autogen_context: AutogenContext - ) -> str: - return cast( - str, - render._render_type_w_subtype( - type_, autogen_context, "item_type", r"(.+?\()" - ), - ) - - def _render_JSON_type( - self, type_: JSON, autogen_context: AutogenContext - ) -> str: - return cast( - str, - render._render_type_w_subtype( - type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)" - ), - ) - - def _render_JSONB_type( - self, type_: JSONB, autogen_context: AutogenContext - ) -> str: - return cast( - str, - render._render_type_w_subtype( - type_, autogen_context, "astext_type", r"(.+?\(.*astext_type=)" - ), - ) - - -class PostgresqlColumnType(AlterColumn): - def __init__( - self, name: str, column_name: str, type_: TypeEngine, **kw - ) -> None: - using = kw.pop("using", None) - super().__init__(name, column_name, **kw) - self.type_ = sqltypes.to_instance(type_) - self.using = using - - -@compiles(RenameTable, "postgresql") -def visit_rename_table( - element: RenameTable, compiler: PGDDLCompiler, **kw -) -> str: - return "%s RENAME TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_table_name(compiler, element.new_table_name, None), - ) - - -@compiles(PostgresqlColumnType, "postgresql") -def visit_column_type( - element: PostgresqlColumnType, compiler: PGDDLCompiler, **kw -) -> str: - return "%s %s %s %s" % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - "TYPE %s" % format_type(compiler, element.type_), - "USING %s" % element.using if element.using else "", - ) - - -@compiles(ColumnComment, "postgresql") -def visit_column_comment( - element: ColumnComment, compiler: PGDDLCompiler, **kw -) -> str: - ddl = "COMMENT ON COLUMN {table_name}.{column_name} IS {comment}" - comment = ( - compiler.sql_compiler.render_literal_value( - element.comment, sqltypes.String() - ) - if element.comment is not None - else "NULL" - ) - - return ddl.format( - table_name=format_table_name( - compiler, element.table_name, element.schema - ), - column_name=format_column_name(compiler, element.column_name), - comment=comment, - ) - - -@compiles(IdentityColumnDefault, "postgresql") -def visit_identity_column( - element: IdentityColumnDefault, compiler: PGDDLCompiler, **kw -): - text = "%s %s " % ( - alter_table(compiler, element.table_name, element.schema), - alter_column(compiler, element.column_name), - ) - if element.default is None: - # drop identity - text += "DROP IDENTITY" - 
return text - elif element.existing_server_default is None: - # add identity options - text += "ADD " - text += compiler.visit_identity_column(element.default) - return text - else: - # alter identity - diff, _, _ = element.impl._compare_identity_default( - element.default, element.existing_server_default - ) - identity = element.default - for attr in sorted(diff): - if attr == "always": - text += "SET GENERATED %s " % ( - "ALWAYS" if identity.always else "BY DEFAULT" - ) - else: - text += "SET %s " % compiler.get_identity_options( - sqla_compat.Identity(**{attr: getattr(identity, attr)}) - ) - return text - - -@Operations.register_operation("create_exclude_constraint") -@BatchOperations.register_operation( - "create_exclude_constraint", "batch_create_exclude_constraint" -) -@ops.AddConstraintOp.register_add_constraint("exclude_constraint") -class CreateExcludeConstraintOp(ops.AddConstraintOp): - """Represent a create exclude constraint operation.""" - - constraint_type = "exclude" - - def __init__( - self, - constraint_name: sqla_compat._ConstraintName, - table_name: Union[str, quoted_name], - elements: Union[ - Sequence[Tuple[str, str]], - Sequence[Tuple[ColumnClause[Any], str]], - ], - where: Optional[Union[ColumnElement[bool], str]] = None, - schema: Optional[str] = None, - _orig_constraint: Optional[ExcludeConstraint] = None, - **kw, - ) -> None: - self.constraint_name = constraint_name - self.table_name = table_name - self.elements = elements - self.where = where - self.schema = schema - self._orig_constraint = _orig_constraint - self.kw = kw - - @classmethod - def from_constraint( # type:ignore[override] - cls, constraint: ExcludeConstraint - ) -> CreateExcludeConstraintOp: - constraint_table = sqla_compat._table_for_constraint(constraint) - return cls( - constraint.name, - constraint_table.name, - [ # type: ignore - (expr, op) for expr, name, op in constraint._render_exprs - ], - where=cast("ColumnElement[bool] | None", constraint.where), - schema=constraint_table.schema, - _orig_constraint=constraint, - deferrable=constraint.deferrable, - initially=constraint.initially, - using=constraint.using, - ) - - def to_constraint( - self, migration_context: Optional[MigrationContext] = None - ) -> ExcludeConstraint: - if self._orig_constraint is not None: - return self._orig_constraint - schema_obj = schemaobj.SchemaObjects(migration_context) - t = schema_obj.table(self.table_name, schema=self.schema) - excl = ExcludeConstraint( - *self.elements, - name=self.constraint_name, - where=self.where, - **self.kw, - ) - for ( - expr, - name, - oper, - ) in excl._render_exprs: - t.append_column(Column(name, NULLTYPE)) - t.append_constraint(excl) - return excl - - @classmethod - def create_exclude_constraint( - cls, - operations: Operations, - constraint_name: str, - table_name: str, - *elements: Any, - **kw: Any, - ) -> Optional[Table]: - """Issue an alter to create an EXCLUDE constraint using the - current migration context. - - .. note:: This method is Postgresql specific, and additionally - requires at least SQLAlchemy 1.0. - - e.g.:: - - from alembic import op - - op.create_exclude_constraint( - "user_excl", - "user", - ("period", "&&"), - ("group", "="), - where=("group != 'some group'"), - ) - - Note that the expressions work the same way as that of - the ``ExcludeConstraint`` object itself; if plain strings are - passed, quoting rules must be applied manually. - - :param name: Name of the constraint. - :param table_name: String name of the source table. 
- :param elements: exclude conditions. - :param where: SQL expression or SQL string with optional WHERE - clause. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. - - """ - op = cls(constraint_name, table_name, elements, **kw) - return operations.invoke(op) - - @classmethod - def batch_create_exclude_constraint( - cls, - operations: BatchOperations, - constraint_name: str, - *elements: Any, - **kw: Any, - ) -> Optional[Table]: - """Issue a "create exclude constraint" instruction using the - current batch migration context. - - .. note:: This method is Postgresql specific, and additionally - requires at least SQLAlchemy 1.0. - - .. seealso:: - - :meth:`.Operations.create_exclude_constraint` - - """ - kw["schema"] = operations.impl.schema - op = cls(constraint_name, operations.impl.table_name, elements, **kw) - return operations.invoke(op) - - -@render.renderers.dispatch_for(CreateExcludeConstraintOp) -def _add_exclude_constraint( - autogen_context: AutogenContext, op: CreateExcludeConstraintOp -) -> str: - return _exclude_constraint(op.to_constraint(), autogen_context, alter=True) - - -@render._constraint_renderers.dispatch_for(ExcludeConstraint) -def _render_inline_exclude_constraint( - constraint: ExcludeConstraint, - autogen_context: AutogenContext, - namespace_metadata: MetaData, -) -> str: - rendered = render._user_defined_render( - "exclude", constraint, autogen_context - ) - if rendered is not False: - return rendered - - return _exclude_constraint(constraint, autogen_context, False) - - -def _postgresql_autogenerate_prefix(autogen_context: AutogenContext) -> str: - imports = autogen_context.imports - if imports is not None: - imports.add("from sqlalchemy.dialects import postgresql") - return "postgresql." 
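For context on the exclude-constraint machinery deleted above: the following is a minimal, hypothetical metadata-side sketch of the kind of `ExcludeConstraint` declaration that autogenerate would diff and render through `CreateExcludeConstraintOp` and the renderers above. The table and column names are illustrative only, not taken from this repository.

```python
# Hypothetical declarative metadata; autogenerate diffs this against the
# database and emits the constraint via CreateExcludeConstraintOp.
from sqlalchemy import Column, Integer, MetaData, String, Table
from sqlalchemy.dialects.postgresql import ExcludeConstraint, TSRANGE

metadata = MetaData()

room_booking = Table(
    "room_booking",  # hypothetical table
    metadata,
    Column("id", Integer, primary_key=True),
    Column("room", String(50), nullable=False),
    Column("during", TSRANGE, nullable=False),
    # no two rows may book the same room over overlapping time ranges
    ExcludeConstraint(
        ("room", "="),
        ("during", "&&"),
        using="gist",
        name="excl_room_booking_overlap",
    ),
)
```

The hand-written migration equivalent, per the `create_exclude_constraint` docstring above, would be `op.create_exclude_constraint("excl_room_booking_overlap", "room_booking", ("room", "="), ("during", "&&"), using="gist")`.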
- - -def _exclude_constraint( - constraint: ExcludeConstraint, - autogen_context: AutogenContext, - alter: bool, -) -> str: - opts: List[Tuple[str, Union[quoted_name, str, _f_name, None]]] = [] - - has_batch = autogen_context._has_batch - - if constraint.deferrable: - opts.append(("deferrable", str(constraint.deferrable))) - if constraint.initially: - opts.append(("initially", str(constraint.initially))) - if constraint.using: - opts.append(("using", str(constraint.using))) - if not has_batch and alter and constraint.table.schema: - opts.append(("schema", render._ident(constraint.table.schema))) - if not alter and constraint.name: - opts.append( - ("name", render._render_gen_name(autogen_context, constraint.name)) - ) - - def do_expr_where_opts(): - args = [ - "(%s, %r)" - % ( - _render_potential_column( - sqltext, # type:ignore[arg-type] - autogen_context, - ), - opstring, - ) - for sqltext, name, opstring in constraint._render_exprs - ] - if constraint.where is not None: - args.append( - "where=%s" - % render._render_potential_expr( - constraint.where, autogen_context - ) - ) - args.extend(["%s=%r" % (k, v) for k, v in opts]) - return args - - if alter: - args = [ - repr(render._render_gen_name(autogen_context, constraint.name)) - ] - if not has_batch: - args += [repr(render._ident(constraint.table.name))] - args.extend(do_expr_where_opts()) - return "%(prefix)screate_exclude_constraint(%(args)s)" % { - "prefix": render._alembic_autogenerate_prefix(autogen_context), - "args": ", ".join(args), - } - else: - args = do_expr_where_opts() - return "%(prefix)sExcludeConstraint(%(args)s)" % { - "prefix": _postgresql_autogenerate_prefix(autogen_context), - "args": ", ".join(args), - } - - -def _render_potential_column( - value: Union[ - ColumnClause[Any], Column[Any], TextClause, FunctionElement[Any] - ], - autogen_context: AutogenContext, -) -> str: - if isinstance(value, ColumnClause): - if value.is_literal: - # like literal_column("int8range(from, to)") in ExcludeConstraint - template = "%(prefix)sliteral_column(%(name)r)" - else: - template = "%(prefix)scolumn(%(name)r)" - - return template % { - "prefix": render._sqlalchemy_autogenerate_prefix(autogen_context), - "name": value.name, - } - else: - return render._render_potential_expr( - value, - autogen_context, - wrap_in_text=isinstance(value, (TextClause, FunctionElement)), - ) diff --git a/myenv/Lib/site-packages/alembic/ddl/sqlite.py b/myenv/Lib/site-packages/alembic/ddl/sqlite.py deleted file mode 100644 index 762e8ca..0000000 --- a/myenv/Lib/site-packages/alembic/ddl/sqlite.py +++ /dev/null @@ -1,225 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import re -from typing import Any -from typing import Dict -from typing import Optional -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import cast -from sqlalchemy import JSON -from sqlalchemy import schema -from sqlalchemy import sql - -from .base import alter_table -from .base import format_table_name -from .base import RenameTable -from .impl import DefaultImpl -from .. 
import util -from ..util.sqla_compat import compiles - -if TYPE_CHECKING: - from sqlalchemy.engine.reflection import Inspector - from sqlalchemy.sql.compiler import DDLCompiler - from sqlalchemy.sql.elements import Cast - from sqlalchemy.sql.elements import ClauseElement - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Constraint - from sqlalchemy.sql.schema import Table - from sqlalchemy.sql.type_api import TypeEngine - - from ..operations.batch import BatchOperationsImpl - - -class SQLiteImpl(DefaultImpl): - __dialect__ = "sqlite" - - transactional_ddl = False - """SQLite supports transactional DDL, but pysqlite does not: - see: http://bugs.python.org/issue10740 - """ - - def requires_recreate_in_batch( - self, batch_op: BatchOperationsImpl - ) -> bool: - """Return True if the given :class:`.BatchOperationsImpl` - would need the table to be recreated and copied in order to - proceed. - - Normally, only returns True on SQLite when operations other - than add_column are present. - - """ - for op in batch_op.batch: - if op[0] == "add_column": - col = op[1][1] - if isinstance( - col.server_default, schema.DefaultClause - ) and isinstance(col.server_default.arg, sql.ClauseElement): - return True - elif ( - isinstance(col.server_default, util.sqla_compat.Computed) - and col.server_default.persisted - ): - return True - elif op[0] not in ("create_index", "drop_index"): - return True - else: - return False - - def add_constraint(self, const: Constraint): - # attempt to distinguish between an - # auto-gen constraint and an explicit one - if const._create_rule is None: - raise NotImplementedError( - "No support for ALTER of constraints in SQLite dialect. " - "Please refer to the batch mode feature which allows for " - "SQLite migrations using a copy-and-move strategy." - ) - elif const._create_rule(self): - util.warn( - "Skipping unsupported ALTER for " - "creation of implicit constraint. " - "Please refer to the batch mode feature which allows for " - "SQLite migrations using a copy-and-move strategy." - ) - - def drop_constraint(self, const: Constraint): - if const._create_rule is None: - raise NotImplementedError( - "No support for ALTER of constraints in SQLite dialect. " - "Please refer to the batch mode feature which allows for " - "SQLite migrations using a copy-and-move strategy." - ) - - def compare_server_default( - self, - inspector_column: Column[Any], - metadata_column: Column[Any], - rendered_metadata_default: Optional[str], - rendered_inspector_default: Optional[str], - ) -> bool: - if rendered_metadata_default is not None: - rendered_metadata_default = re.sub( - r"^\((.+)\)$", r"\1", rendered_metadata_default - ) - - rendered_metadata_default = re.sub( - r"^\"?'(.+)'\"?$", r"\1", rendered_metadata_default - ) - - if rendered_inspector_default is not None: - rendered_inspector_default = re.sub( - r"^\((.+)\)$", r"\1", rendered_inspector_default - ) - - rendered_inspector_default = re.sub( - r"^\"?'(.+)'\"?$", r"\1", rendered_inspector_default - ) - - return rendered_inspector_default != rendered_metadata_default - - def _guess_if_default_is_unparenthesized_sql_expr( - self, expr: Optional[str] - ) -> bool: - """Determine if a server default is a SQL expression or a constant. - - There are too many assertions that expect server defaults to round-trip - identically without parenthesis added so we will add parens only in - very specific cases. 
- - """ - if not expr: - return False - elif re.match(r"^[0-9\.]$", expr): - return False - elif re.match(r"^'.+'$", expr): - return False - elif re.match(r"^\(.+\)$", expr): - return False - else: - return True - - def autogen_column_reflect( - self, - inspector: Inspector, - table: Table, - column_info: Dict[str, Any], - ) -> None: - # SQLite expression defaults require parenthesis when sent - # as DDL - if self._guess_if_default_is_unparenthesized_sql_expr( - column_info.get("default", None) - ): - column_info["default"] = "(%s)" % (column_info["default"],) - - def render_ddl_sql_expr( - self, expr: ClauseElement, is_server_default: bool = False, **kw - ) -> str: - # SQLite expression defaults require parenthesis when sent - # as DDL - str_expr = super().render_ddl_sql_expr( - expr, is_server_default=is_server_default, **kw - ) - - if ( - is_server_default - and self._guess_if_default_is_unparenthesized_sql_expr(str_expr) - ): - str_expr = "(%s)" % (str_expr,) - return str_expr - - def cast_for_batch_migrate( - self, - existing: Column[Any], - existing_transfer: Dict[str, Union[TypeEngine, Cast]], - new_type: TypeEngine, - ) -> None: - if ( - existing.type._type_affinity is not new_type._type_affinity - and not isinstance(new_type, JSON) - ): - existing_transfer["expr"] = cast( - existing_transfer["expr"], new_type - ) - - def correct_for_autogen_constraints( - self, - conn_unique_constraints, - conn_indexes, - metadata_unique_constraints, - metadata_indexes, - ): - self._skip_functional_indexes(metadata_indexes, conn_indexes) - - -@compiles(RenameTable, "sqlite") -def visit_rename_table( - element: RenameTable, compiler: DDLCompiler, **kw -) -> str: - return "%s RENAME TO %s" % ( - alter_table(compiler, element.table_name, element.schema), - format_table_name(compiler, element.new_table_name, None), - ) - - -# @compiles(AddColumn, 'sqlite') -# def visit_add_column(element, compiler, **kw): -# return "%s %s" % ( -# alter_table(compiler, element.table_name, element.schema), -# add_column(compiler, element.column, **kw) -# ) - - -# def add_column(compiler, column, **kw): -# text = "ADD COLUMN %s" % compiler.get_column_specification(column, **kw) -# need to modify SQLAlchemy so that the CHECK associated with a Boolean -# or Enum gets placed as part of the column constraints, not the Table -# see ticket 98 -# for const in column.constraints: -# text += compiler.process(AddConstraint(const)) -# return text diff --git a/myenv/Lib/site-packages/alembic/environment.py b/myenv/Lib/site-packages/alembic/environment.py deleted file mode 100644 index adfc93e..0000000 --- a/myenv/Lib/site-packages/alembic/environment.py +++ /dev/null @@ -1 +0,0 @@ -from .runtime.environment import * # noqa diff --git a/myenv/Lib/site-packages/alembic/migration.py b/myenv/Lib/site-packages/alembic/migration.py deleted file mode 100644 index 02626e2..0000000 --- a/myenv/Lib/site-packages/alembic/migration.py +++ /dev/null @@ -1 +0,0 @@ -from .runtime.migration import * # noqa diff --git a/myenv/Lib/site-packages/alembic/op.py b/myenv/Lib/site-packages/alembic/op.py deleted file mode 100644 index f3f5fac..0000000 --- a/myenv/Lib/site-packages/alembic/op.py +++ /dev/null @@ -1,5 +0,0 @@ -from .operations.base import Operations - -# create proxy functions for -# each method on the Operations class. 
-Operations.create_module_class_proxy(globals(), locals()) diff --git a/myenv/Lib/site-packages/alembic/op.pyi b/myenv/Lib/site-packages/alembic/op.pyi deleted file mode 100644 index 9204446..0000000 --- a/myenv/Lib/site-packages/alembic/op.pyi +++ /dev/null @@ -1,1338 +0,0 @@ -# ### this file stubs are generated by tools/write_pyi.py - do not edit ### -# ### imports are manually managed -from __future__ import annotations - -from contextlib import contextmanager -from typing import Any -from typing import Awaitable -from typing import Callable -from typing import Dict -from typing import Iterator -from typing import List -from typing import Literal -from typing import Mapping -from typing import Optional -from typing import overload -from typing import Sequence -from typing import Tuple -from typing import Type -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -if TYPE_CHECKING: - from sqlalchemy.engine import Connection - from sqlalchemy.sql import Executable - from sqlalchemy.sql.elements import ColumnElement - from sqlalchemy.sql.elements import conv - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.expression import TableClause - from sqlalchemy.sql.functions import Function - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Computed - from sqlalchemy.sql.schema import Identity - from sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.sql.schema import Table - from sqlalchemy.sql.type_api import TypeEngine - from sqlalchemy.util import immutabledict - - from .operations.base import BatchOperations - from .operations.ops import AddColumnOp - from .operations.ops import AddConstraintOp - from .operations.ops import AlterColumnOp - from .operations.ops import AlterTableOp - from .operations.ops import BulkInsertOp - from .operations.ops import CreateIndexOp - from .operations.ops import CreateTableCommentOp - from .operations.ops import CreateTableOp - from .operations.ops import DropColumnOp - from .operations.ops import DropConstraintOp - from .operations.ops import DropIndexOp - from .operations.ops import DropTableCommentOp - from .operations.ops import DropTableOp - from .operations.ops import ExecuteSQLOp - from .operations.ops import MigrateOperation - from .runtime.migration import MigrationContext - from .util.sqla_compat import _literal_bindparam - -_T = TypeVar("_T") -_C = TypeVar("_C", bound=Callable[..., Any]) - -### end imports ### - -def add_column( - table_name: str, column: Column[Any], *, schema: Optional[str] = None -) -> None: - """Issue an "add column" instruction using the current - migration context. - - e.g.:: - - from alembic import op - from sqlalchemy import Column, String - - op.add_column("organization", Column("name", String())) - - The :meth:`.Operations.add_column` method typically corresponds - to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope - of this command, the column's name, datatype, nullability, - and optional server-generated defaults may be indicated. - - .. note:: - - With the exception of NOT NULL constraints or single-column FOREIGN - KEY constraints, other kinds of constraints such as PRIMARY KEY, - UNIQUE or CHECK constraints **cannot** be generated using this - method; for these constraints, refer to operations such as - :meth:`.Operations.create_primary_key` and - :meth:`.Operations.create_check_constraint`. 
In particular, the - following :class:`~sqlalchemy.schema.Column` parameters are - **ignored**: - - * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases - typically do not support an ALTER operation that can add - individual columns one at a time to an existing primary key - constraint, therefore it's less ambiguous to use the - :meth:`.Operations.create_primary_key` method, which assumes no - existing primary key constraint is present. - * :paramref:`~sqlalchemy.schema.Column.unique` - use the - :meth:`.Operations.create_unique_constraint` method - * :paramref:`~sqlalchemy.schema.Column.index` - use the - :meth:`.Operations.create_index` method - - - The provided :class:`~sqlalchemy.schema.Column` object may include a - :class:`~sqlalchemy.schema.ForeignKey` constraint directive, - referencing a remote table name. For this specific type of constraint, - Alembic will automatically emit a second ALTER statement in order to - add the single-column FOREIGN KEY constraint separately:: - - from alembic import op - from sqlalchemy import Column, INTEGER, ForeignKey - - op.add_column( - "organization", - Column("account_id", INTEGER, ForeignKey("accounts.id")), - ) - - The column argument passed to :meth:`.Operations.add_column` is a - :class:`~sqlalchemy.schema.Column` construct, used in the same way it's - used in SQLAlchemy. In particular, values or functions to be indicated - as producing the column's default value on the database side are - specified using the ``server_default`` parameter, and not ``default`` - which only specifies Python-side defaults:: - - from alembic import op - from sqlalchemy import Column, TIMESTAMP, func - - # specify "DEFAULT NOW" along with the column add - op.add_column( - "account", - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - :param table_name: String name of the parent table. - :param column: a :class:`sqlalchemy.schema.Column` object - representing the new column. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - -def alter_column( - table_name: str, - column_name: str, - *, - nullable: Optional[bool] = None, - comment: Union[str, Literal[False], None] = False, - server_default: Any = False, - new_column_name: Optional[str] = None, - type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, - existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, - existing_server_default: Union[ - str, bool, Identity, Computed, None - ] = False, - existing_nullable: Optional[bool] = None, - existing_comment: Optional[str] = None, - schema: Optional[str] = None, - **kw: Any, -) -> None: - r"""Issue an "alter column" instruction using the - current migration context. - - Generally, only that aspect of the column which - is being changed, i.e. name, type, nullability, - default, needs to be specified. Multiple changes - can also be specified at once and the backend should - "do the right thing", emitting each change either - separately or together as the backend allows. - - MySQL has special requirements here, since MySQL - cannot ALTER a column without a full specification. - When producing MySQL-compatible migration files, - it is recommended that the ``existing_type``, - ``existing_server_default``, and ``existing_nullable`` - parameters be present, if not being altered. 
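As a concrete sketch of that recommendation (table, column, and type are hypothetical, not from this project), a MySQL-compatible nullability change restates the unaltered aspects of the column through the ``existing_*`` parameters:

```python
# Hypothetical migration step: tighten nullability only. Because MySQL
# re-specifies the whole column via CHANGE/MODIFY, the current type is
# restated rather than altered.
import sqlalchemy as sa
from alembic import op


def upgrade() -> None:
    op.alter_column(
        "account",  # hypothetical table
        "name",  # hypothetical column
        nullable=False,
        existing_type=sa.String(length=100),
        existing_server_default=None,  # column has no default to preserve
    )
```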
- - Type changes which are against the SQLAlchemy - "schema" types :class:`~sqlalchemy.types.Boolean` - and :class:`~sqlalchemy.types.Enum` may also - add or drop constraints which accompany those - types on backends that don't support them natively. - The ``existing_type`` argument is - used in this case to identify and remove a previous - constraint that was bound to the type object. - - :param table_name: string name of the target table. - :param column_name: string name of the target column, - as it exists before the operation begins. - :param nullable: Optional; specify ``True`` or ``False`` - to alter the column's nullability. - :param server_default: Optional; specify a string - SQL expression, :func:`~sqlalchemy.sql.expression.text`, - or :class:`~sqlalchemy.schema.DefaultClause` to indicate - an alteration to the column's default value. - Set to ``None`` to have the default removed. - :param comment: optional string text of a new comment to add to the - column. - :param new_column_name: Optional; specify a string name here to - indicate the new name within a column rename operation. - :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` - type object to specify a change to the column's type. - For SQLAlchemy types that also indicate a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), - the constraint is also generated. - :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; - currently understood by the MySQL dialect. - :param existing_type: Optional; a - :class:`~sqlalchemy.types.TypeEngine` - type object to specify the previous type. This - is required for all MySQL column alter operations that - don't otherwise specify a new type, as well as for - when nullability is being changed on a SQL Server - column. It is also used if the type is a so-called - SQLAlchemy "schema" type which may define a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, - :class:`~sqlalchemy.types.Enum`), - so that the constraint can be dropped. - :param existing_server_default: Optional; The existing - default value of the column. Required on MySQL if - an existing default is not being changed; else MySQL - removes the default. - :param existing_nullable: Optional; the existing nullability - of the column. Required on MySQL if the existing nullability - is not being changed; else MySQL sets this to NULL. - :param existing_autoincrement: Optional; the existing autoincrement - of the column. Used for MySQL's system of altering a column - that specifies ``AUTO_INCREMENT``. - :param existing_comment: string text of the existing comment on the - column to be maintained. Required on MySQL if the existing comment - on the column is not being changed. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param postgresql_using: String argument which will indicate a - SQL expression to render within the Postgresql-specific USING clause - within ALTER COLUMN. This string is taken directly as raw SQL which - must explicitly include any necessary quoting or escaping of tokens - within the expression. - - """ - -@contextmanager -def batch_alter_table( - table_name: str, - schema: Optional[str] = None, - recreate: Literal["auto", "always", "never"] = "auto", - partial_reordering: Optional[Tuple[Any, ...]] = None, - copy_from: Optional[Table] = None, - table_args: Tuple[Any, ...] 
= (), - table_kwargs: Mapping[str, Any] = immutabledict({}), - reflect_args: Tuple[Any, ...] = (), - reflect_kwargs: Mapping[str, Any] = immutabledict({}), - naming_convention: Optional[Dict[str, str]] = None, -) -> Iterator[BatchOperations]: - """Invoke a series of per-table migrations in batch. - - Batch mode allows a series of operations specific to a table - to be syntactically grouped together, and allows for alternate - modes of table migration, in particular the "recreate" style of - migration required by SQLite. - - "recreate" style is as follows: - - 1. A new table is created with the new specification, based on the - migration directives within the batch, using a temporary name. - - 2. the data copied from the existing table to the new table. - - 3. the existing table is dropped. - - 4. the new table is renamed to the existing table name. - - The directive by default will only use "recreate" style on the - SQLite backend, and only if directives are present which require - this form, e.g. anything other than ``add_column()``. The batch - operation on other backends will proceed using standard ALTER TABLE - operations. - - The method is used as a context manager, which returns an instance - of :class:`.BatchOperations`; this object is the same as - :class:`.Operations` except that table names and schema names - are omitted. E.g.:: - - with op.batch_alter_table("some_table") as batch_op: - batch_op.add_column(Column("foo", Integer)) - batch_op.drop_column("bar") - - The operations within the context manager are invoked at once - when the context is ended. When run against SQLite, if the - migrations include operations not supported by SQLite's ALTER TABLE, - the entire table will be copied to a new one with the new - specification, moving all data across as well. - - The copy operation by default uses reflection to retrieve the current - structure of the table, and therefore :meth:`.batch_alter_table` - in this mode requires that the migration is run in "online" mode. - The ``copy_from`` parameter may be passed which refers to an existing - :class:`.Table` object, which will bypass this reflection step. - - .. note:: The table copy operation will currently not copy - CHECK constraints, and may not copy UNIQUE constraints that are - unnamed, as is possible on SQLite. See the section - :ref:`sqlite_batch_constraints` for workarounds. - - :param table_name: name of table - :param schema: optional schema name. - :param recreate: under what circumstances the table should be - recreated. At its default of ``"auto"``, the SQLite dialect will - recreate the table if any operations other than ``add_column()``, - ``create_index()``, or ``drop_index()`` are - present. Other options include ``"always"`` and ``"never"``. - :param copy_from: optional :class:`~sqlalchemy.schema.Table` object - that will act as the structure of the table being copied. If omitted, - table reflection is used to retrieve the structure of the table. - - .. seealso:: - - :ref:`batch_offline_mode` - - :paramref:`~.Operations.batch_alter_table.reflect_args` - - :paramref:`~.Operations.batch_alter_table.reflect_kwargs` - - :param reflect_args: a sequence of additional positional arguments that - will be applied to the table structure being reflected / copied; - this may be used to pass column and constraint overrides to the - table that will be reflected, in lieu of passing the whole - :class:`~sqlalchemy.schema.Table` using - :paramref:`~.Operations.batch_alter_table.copy_from`. 
- :param reflect_kwargs: a dictionary of additional keyword arguments - that will be applied to the table structure being copied; this may be - used to pass additional table and reflection options to the table that - will be reflected, in lieu of passing the whole - :class:`~sqlalchemy.schema.Table` using - :paramref:`~.Operations.batch_alter_table.copy_from`. - :param table_args: a sequence of additional positional arguments that - will be applied to the new :class:`~sqlalchemy.schema.Table` when - created, in addition to those copied from the source table. - This may be used to provide additional constraints such as CHECK - constraints that may not be reflected. - :param table_kwargs: a dictionary of additional keyword arguments - that will be applied to the new :class:`~sqlalchemy.schema.Table` - when created, in addition to those copied from the source table. - This may be used to provide for additional table options that may - not be reflected. - :param naming_convention: a naming convention dictionary of the form - described at :ref:`autogen_naming_conventions` which will be applied - to the :class:`~sqlalchemy.schema.MetaData` during the reflection - process. This is typically required if one wants to drop SQLite - constraints, as these constraints will not have names when - reflected on this backend. Requires SQLAlchemy **0.9.4** or greater. - - .. seealso:: - - :ref:`dropping_sqlite_foreign_keys` - - :param partial_reordering: a list of tuples, each suggesting a desired - ordering of two or more columns in the newly created table. Requires - that :paramref:`.batch_alter_table.recreate` is set to ``"always"``. - Examples, given a table with columns "a", "b", "c", and "d": - - Specify the order of all columns:: - - with op.batch_alter_table( - "some_table", - recreate="always", - partial_reordering=[("c", "d", "a", "b")], - ) as batch_op: - pass - - Ensure "d" appears before "c", and "b", appears before "a":: - - with op.batch_alter_table( - "some_table", - recreate="always", - partial_reordering=[("d", "c"), ("b", "a")], - ) as batch_op: - pass - - The ordering of columns not included in the partial_reordering - set is undefined. Therefore it is best to specify the complete - ordering of all columns for best results. - - .. note:: batch mode requires SQLAlchemy 0.8 or above. - - .. seealso:: - - :ref:`batch_migrations` - - """ - -def bulk_insert( - table: Union[Table, TableClause], - rows: List[Dict[str, Any]], - *, - multiinsert: bool = True, -) -> None: - """Issue a "bulk insert" operation using the current - migration context. - - This provides a means of representing an INSERT of multiple rows - which works equally well in the context of executing on a live - connection as well as that of generating a SQL script. In the - case of a SQL script, the values are rendered inline into the - statement. - - e.g.:: - - from alembic import op - from datetime import date - from sqlalchemy.sql import table, column - from sqlalchemy import String, Integer, Date - - # Create an ad-hoc table to use for the insert statement. 
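# (Note on the pattern used below: sqlalchemy.sql.table() and column()
# build a lightweight, MetaData-free TableClause, which is all that
# bulk_insert() needs in order to render the INSERT statement.)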
- accounts_table = table( - "account", - column("id", Integer), - column("name", String), - column("create_date", Date), - ) - - op.bulk_insert( - accounts_table, - [ - { - "id": 1, - "name": "John Smith", - "create_date": date(2010, 10, 5), - }, - { - "id": 2, - "name": "Ed Williams", - "create_date": date(2007, 5, 27), - }, - { - "id": 3, - "name": "Wendy Jones", - "create_date": date(2008, 8, 15), - }, - ], - ) - - When using --sql mode, some datatypes may not render inline - automatically, such as dates and other special types. When this - issue is present, :meth:`.Operations.inline_literal` may be used:: - - op.bulk_insert( - accounts_table, - [ - { - "id": 1, - "name": "John Smith", - "create_date": op.inline_literal("2010-10-05"), - }, - { - "id": 2, - "name": "Ed Williams", - "create_date": op.inline_literal("2007-05-27"), - }, - { - "id": 3, - "name": "Wendy Jones", - "create_date": op.inline_literal("2008-08-15"), - }, - ], - multiinsert=False, - ) - - When using :meth:`.Operations.inline_literal` in conjunction with - :meth:`.Operations.bulk_insert`, in order for the statement to work - in "online" (e.g. non --sql) mode, the - :paramref:`~.Operations.bulk_insert.multiinsert` - flag should be set to ``False``, which will have the effect of - individual INSERT statements being emitted to the database, each - with a distinct VALUES clause, so that the "inline" values can - still be rendered, rather than attempting to pass the values - as bound parameters. - - :param table: a table object which represents the target of the INSERT. - - :param rows: a list of dictionaries indicating rows. - - :param multiinsert: when at its default of True and --sql mode is not - enabled, the INSERT statement will be executed using - "executemany()" style, where all elements in the list of - dictionaries are passed as bound parameters in a single - list. Setting this to False results in individual INSERT - statements being emitted per parameter set, and is needed - in those cases where non-literal values are present in the - parameter sets. - - """ - -def create_check_constraint( - constraint_name: Optional[str], - table_name: str, - condition: Union[str, ColumnElement[bool], TextClause], - *, - schema: Optional[str] = None, - **kw: Any, -) -> None: - """Issue a "create check constraint" instruction using the - current migration context. - - e.g.:: - - from alembic import op - from sqlalchemy.sql import column, func - - op.create_check_constraint( - "ck_user_name_len", - "user", - func.len(column("name")) > 5, - ) - - CHECK constraints are usually against a SQL expression, so ad-hoc - table metadata is usually needed. The function will convert the given - arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound - to an anonymous table in order to emit the CREATE statement. - - :param name: Name of the check constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the source table. - :param condition: SQL expression that's the condition of the - constraint. Can be a string or SQLAlchemy expression language - structure. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. 
If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - -def create_exclude_constraint( - constraint_name: str, table_name: str, *elements: Any, **kw: Any -) -> Optional[Table]: - """Issue an alter to create an EXCLUDE constraint using the - current migration context. - - .. note:: This method is Postgresql specific, and additionally - requires at least SQLAlchemy 1.0. - - e.g.:: - - from alembic import op - - op.create_exclude_constraint( - "user_excl", - "user", - ("period", "&&"), - ("group", "="), - where=("group != 'some group'"), - ) - - Note that the expressions work the same way as that of - the ``ExcludeConstraint`` object itself; if plain strings are - passed, quoting rules must be applied manually. - - :param name: Name of the constraint. - :param table_name: String name of the source table. - :param elements: exclude conditions. - :param where: SQL expression or SQL string with optional WHERE - clause. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. - - """ - -def create_foreign_key( - constraint_name: Optional[str], - source_table: str, - referent_table: str, - local_cols: List[str], - remote_cols: List[str], - *, - onupdate: Optional[str] = None, - ondelete: Optional[str] = None, - deferrable: Optional[bool] = None, - initially: Optional[str] = None, - match: Optional[str] = None, - source_schema: Optional[str] = None, - referent_schema: Optional[str] = None, - **dialect_kw: Any, -) -> None: - """Issue a "create foreign key" instruction using the - current migration context. - - e.g.:: - - from alembic import op - - op.create_foreign_key( - "fk_user_address", - "address", - "user", - ["user_id"], - ["id"], - ) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.ForeignKeyConstraint` - object which it then associates with the - :class:`~sqlalchemy.schema.Table`. - Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param constraint_name: Name of the foreign key constraint. The name - is necessary so that an ALTER statement can be emitted. For setups - that use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param source_table: String name of the source table. - :param referent_table: String name of the destination table. - :param local_cols: a list of string column names in the - source table. - :param remote_cols: a list of string column names in the - remote table. - :param onupdate: Optional string. If set, emit ON UPDATE when - issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. - :param ondelete: Optional string. If set, emit ON DELETE when - issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. 
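For instance, a sketch combining the constraint with a cascading delete
rule (the ``order`` and ``user`` tables here are hypothetical)::

    from alembic import op

    op.create_foreign_key(
        "fk_order_user_id",
        "order",
        "user",
        ["user_id"],
        ["id"],
        ondelete="CASCADE",
    )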
- :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
- DEFERRABLE when issuing DDL for this constraint.
- :param source_schema: Optional schema name of the source table.
- :param referent_schema: Optional schema name of the destination table.
-
- """
-
-def create_index(
- index_name: Optional[str],
- table_name: str,
- columns: Sequence[Union[str, TextClause, Function[Any]]],
- *,
- schema: Optional[str] = None,
- unique: bool = False,
- if_not_exists: Optional[bool] = None,
- **kw: Any,
-) -> None:
- r"""Issue a "create index" instruction using the current
- migration context.
-
- e.g.::
-
- from alembic import op
-
- op.create_index("ik_test", "t1", ["foo", "bar"])
-
- Functional indexes can be produced by using the
- :func:`sqlalchemy.sql.expression.text` construct::
-
- from alembic import op
- from sqlalchemy import text
-
- op.create_index("ik_test", "t1", [text("lower(foo)")])
-
- :param index_name: name of the index.
- :param table_name: name of the owning table.
- :param columns: a list consisting of string column names and/or
- :func:`~sqlalchemy.sql.expression.text` constructs.
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
- :param unique: If True, create a unique index.
-
- :param quote: Force quoting of this column's name on or off,
- corresponding to ``True`` or ``False``. When left at its default
- of ``None``, the column identifier will be quoted according to
- whether the name is case sensitive (identifiers with at least one
- upper case character are treated as case sensitive), or if it's a
- reserved word. This flag is only needed to force quoting of a
- reserved word which is not known by the SQLAlchemy dialect.
-
- :param if_not_exists: If True, adds IF NOT EXISTS operator when
- creating the new index.
-
- .. versionadded:: 1.12.0
-
- :param \**kw: Additional keyword arguments not mentioned above are
- dialect specific, and passed in the form
- ``<dialectname>_<argname>``.
- See the documentation regarding an individual dialect at
- :ref:`dialect_toplevel` for detail on documented arguments.
-
- """
-
-def create_primary_key(
- constraint_name: Optional[str],
- table_name: str,
- columns: List[str],
- *,
- schema: Optional[str] = None,
-) -> None:
- """Issue a "create primary key" instruction using the current
- migration context.
-
- e.g.::
-
- from alembic import op
-
- op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
-
- This internally generates a :class:`~sqlalchemy.schema.Table` object
- containing the necessary columns, then generates a new
- :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
- object which it then associates with the
- :class:`~sqlalchemy.schema.Table`.
- Any event listeners associated with this action will be fired
- off normally. The :class:`~sqlalchemy.schema.AddConstraint`
- construct is ultimately used to generate the ALTER statement.
-
- :param constraint_name: Name of the primary key constraint. The name
- is necessary so that an ALTER statement can be emitted. For setups
- that use an automated naming scheme such as that described at
- :ref:`sqla:constraint_naming_conventions`,
- ``name`` here can be ``None``, as the event listener will
- apply the name to the constraint object when it is associated
- with the table.
- :param table_name: String name of the target table.
- :param columns: a list of string column names to be applied to the
- primary key constraint.
- :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - -def create_table( - table_name: str, - *columns: SchemaItem, - if_not_exists: Optional[bool] = None, - **kw: Any, -) -> Table: - r"""Issue a "create table" instruction using the current migration - context. - - This directive receives an argument list similar to that of the - traditional :class:`sqlalchemy.schema.Table` construct, but without the - metadata:: - - from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column - from alembic import op - - op.create_table( - "account", - Column("id", INTEGER, primary_key=True), - Column("name", VARCHAR(50), nullable=False), - Column("description", NVARCHAR(200)), - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - Note that :meth:`.create_table` accepts - :class:`~sqlalchemy.schema.Column` - constructs directly from the SQLAlchemy library. In particular, - default values to be created on the database side are - specified using the ``server_default`` parameter, and not - ``default`` which only specifies Python-side defaults:: - - from alembic import op - from sqlalchemy import Column, TIMESTAMP, func - - # specify "DEFAULT NOW" along with the "timestamp" column - op.create_table( - "account", - Column("id", INTEGER, primary_key=True), - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - The function also returns a newly created - :class:`~sqlalchemy.schema.Table` object, corresponding to the table - specification given, which is suitable for - immediate SQL operations, in particular - :meth:`.Operations.bulk_insert`:: - - from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column - from alembic import op - - account_table = op.create_table( - "account", - Column("id", INTEGER, primary_key=True), - Column("name", VARCHAR(50), nullable=False), - Column("description", NVARCHAR(200)), - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - op.bulk_insert( - account_table, - [ - {"name": "A1", "description": "account 1"}, - {"name": "A2", "description": "account 2"}, - ], - ) - - :param table_name: Name of the table - :param \*columns: collection of :class:`~sqlalchemy.schema.Column` - objects within - the table, as well as optional :class:`~sqlalchemy.schema.Constraint` - objects - and :class:`~.sqlalchemy.schema.Index` objects. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param if_not_exists: If True, adds IF NOT EXISTS operator when - creating the new table. - - .. versionadded:: 1.13.3 - :param \**kw: Other keyword arguments are passed to the underlying - :class:`sqlalchemy.schema.Table` object created for the command. - - :return: the :class:`~sqlalchemy.schema.Table` object corresponding - to the parameters given. - - """ - -def create_table_comment( - table_name: str, - comment: Optional[str], - *, - existing_comment: Optional[str] = None, - schema: Optional[str] = None, -) -> None: - """Emit a COMMENT ON operation to set the comment for a table. - - :param table_name: string name of the target table. - :param comment: string value of the comment being registered against - the specified table. 
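For instance, a minimal sketch (the ``account`` table and comment text
here are hypothetical)::

    from alembic import op

    op.create_table_comment(
        "account",
        "Holds one row per customer account.",
    )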
- :param existing_comment: String value of a comment - already registered on the specified table, used within autogenerate - so that the operation is reversible, but not required for direct - use. - - .. seealso:: - - :meth:`.Operations.drop_table_comment` - - :paramref:`.Operations.alter_column.comment` - - """ - -def create_unique_constraint( - constraint_name: Optional[str], - table_name: str, - columns: Sequence[str], - *, - schema: Optional[str] = None, - **kw: Any, -) -> Any: - """Issue a "create unique constraint" instruction using the - current migration context. - - e.g.:: - - from alembic import op - op.create_unique_constraint("uq_user_name", "user", ["name"]) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.UniqueConstraint` - object which it then associates with the - :class:`~sqlalchemy.schema.Table`. - Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param name: Name of the unique constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the source table. - :param columns: a list of string column names in the - source table. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - -def drop_column( - table_name: str, - column_name: str, - *, - schema: Optional[str] = None, - **kw: Any, -) -> None: - """Issue a "drop column" instruction using the current - migration context. - - e.g.:: - - drop_column("organization", "account_id") - - :param table_name: name of table - :param column_name: name of column - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param mssql_drop_check: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the CHECK constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.check_constraints, - then exec's a separate DROP CONSTRAINT for that constraint. - :param mssql_drop_default: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the DEFAULT constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.default_constraints, - then exec's a separate DROP CONSTRAINT for that default. - :param mssql_drop_foreign_key: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop a single FOREIGN KEY constraint on the column using a - SQL-script-compatible - block that selects into a @variable from - sys.foreign_keys/sys.foreign_key_columns, - then exec's a separate DROP CONSTRAINT for that default. 
Only
- works if the column has exactly one FK constraint which refers to
- it, at the moment.
-
- """
-
-def drop_constraint(
- constraint_name: str,
- table_name: str,
- type_: Optional[str] = None,
- *,
- schema: Optional[str] = None,
-) -> None:
- r"""Drop a constraint of the given name, typically via DROP CONSTRAINT.
-
- :param constraint_name: name of the constraint.
- :param table_name: table name.
- :param type\_: optional, required on MySQL. Can be
- 'foreignkey', 'primary', 'unique', or 'check'.
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
-
- """
-
-def drop_index(
- index_name: str,
- table_name: Optional[str] = None,
- *,
- schema: Optional[str] = None,
- if_exists: Optional[bool] = None,
- **kw: Any,
-) -> None:
- r"""Issue a "drop index" instruction using the current
- migration context.
-
- e.g.::
-
- drop_index("accounts")
-
- :param index_name: name of the index.
- :param table_name: name of the owning table. Some
- backends such as Microsoft SQL Server require this.
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
-
- :param if_exists: If True, adds IF EXISTS operator when
- dropping the index.
-
- .. versionadded:: 1.12.0
-
- :param \**kw: Additional keyword arguments not mentioned above are
- dialect specific, and passed in the form
- ``<dialectname>_<argname>``.
- See the documentation regarding an individual dialect at
- :ref:`dialect_toplevel` for detail on documented arguments.
-
- """
-
-def drop_table(
- table_name: str,
- *,
- schema: Optional[str] = None,
- if_exists: Optional[bool] = None,
- **kw: Any,
-) -> None:
- r"""Issue a "drop table" instruction using the current
- migration context.
-
-
- e.g.::
-
- drop_table("accounts")
-
- :param table_name: Name of the table
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
- :param if_exists: If True, adds IF EXISTS operator when
- dropping the table.
-
- .. versionadded:: 1.13.3
- :param \**kw: Other keyword arguments are passed to the underlying
- :class:`sqlalchemy.schema.Table` object created for the command.
-
- """
-
-def drop_table_comment(
- table_name: str,
- *,
- existing_comment: Optional[str] = None,
- schema: Optional[str] = None,
-) -> None:
- """Issue a "drop table comment" operation to
- remove an existing comment set on a table.
-
- :param table_name: string name of the target table.
- :param existing_comment: An optional string value of a comment already
- registered on the specified table.
-
- .. seealso::
-
- :meth:`.Operations.create_table_comment`
-
- :paramref:`.Operations.alter_column.comment`
-
- """
-
-def execute(
- sqltext: Union[Executable, str],
- *,
- execution_options: Optional[dict[str, Any]] = None,
-) -> None:
- r"""Execute the given SQL using the current migration context.
- - The given SQL can be a plain string, e.g.:: - - op.execute("INSERT INTO table (foo) VALUES ('some value')") - - Or it can be any kind of Core SQL Expression construct, such as - below where we use an update construct:: - - from sqlalchemy.sql import table, column - from sqlalchemy import String - from alembic import op - - account = table("account", column("name", String)) - op.execute( - account.update() - .where(account.c.name == op.inline_literal("account 1")) - .values({"name": op.inline_literal("account 2")}) - ) - - Above, we made use of the SQLAlchemy - :func:`sqlalchemy.sql.expression.table` and - :func:`sqlalchemy.sql.expression.column` constructs to make a brief, - ad-hoc table construct just for our UPDATE statement. A full - :class:`~sqlalchemy.schema.Table` construct of course works perfectly - fine as well, though note it's a recommended practice to at least - ensure the definition of a table is self-contained within the migration - script, rather than imported from a module that may break compatibility - with older migrations. - - In a SQL script context, the statement is emitted directly to the - output stream. There is *no* return result, however, as this - function is oriented towards generating a change script - that can run in "offline" mode. Additionally, parameterized - statements are discouraged here, as they *will not work* in offline - mode. Above, we use :meth:`.inline_literal` where parameters are - to be used. - - For full interaction with a connected database where parameters can - also be used normally, use the "bind" available from the context:: - - from alembic import op - - connection = op.get_bind() - - connection.execute( - account.update() - .where(account.c.name == "account 1") - .values({"name": "account 2"}) - ) - - Additionally, when passing the statement as a plain string, it is first - coerced into a :func:`sqlalchemy.sql.expression.text` construct - before being passed along. In the less likely case that the - literal SQL string contains a colon, it must be escaped with a - backslash, as:: - - op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')") - - - :param sqltext: Any legal SQLAlchemy expression, including: - - * a string - * a :func:`sqlalchemy.sql.expression.text` construct. - * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update` construct. - * a :func:`sqlalchemy.sql.expression.delete` construct. - * Any "executable" described in SQLAlchemy Core documentation, - noting that no result set is returned. - - .. note:: when passing a plain string, the statement is coerced into - a :func:`sqlalchemy.sql.expression.text` construct. This construct - considers symbols with colons, e.g. ``:foo`` to be bound parameters. - To avoid this, ensure that colon symbols are escaped, e.g. - ``\:foo``. - - :param execution_options: Optional dictionary of - execution options, will be passed to - :meth:`sqlalchemy.engine.Connection.execution_options`. - """ - -def f(name: str) -> conv: - """Indicate a string name that has already had a naming convention - applied to it. - - This feature combines with the SQLAlchemy ``naming_convention`` feature - to disambiguate constraint names that have already had naming - conventions applied to them, versus those that have not. This is - necessary in the case that the ``"%(constraint_name)s"`` token - is used within a naming convention, so that it can be identified - that this particular name should remain fixed. 
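Such a convention is typically configured on the application's metadata;
a sketch, assuming standard SQLAlchemy usage::

    from sqlalchemy import MetaData

    metadata = MetaData(
        naming_convention={
            "ck": "ck_%(table_name)s_%(constraint_name)s",
        }
    )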
-
- If the :meth:`.Operations.f` is used on a constraint, the naming
- convention will not take effect::
-
- op.add_column("t", Column("x", Boolean(name=op.f("ck_bool_t_x"))))
-
- Above, the CHECK constraint generated will have the name
- ``ck_bool_t_x`` regardless of whether or not a naming convention is
- in use.
-
- Alternatively, if a naming convention is in use, and 'f' is not used,
- names will be converted along conventions. If the ``target_metadata``
- contains the naming convention
- ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
- output of the following::
-
- op.add_column("t", Column("x", Boolean(name="x")))
-
- will be::
-
- CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))
-
- The function is rendered in the output of autogenerate when
- a particular constraint name is already converted.
-
- """
-
-def get_bind() -> Connection:
- """Return the current 'bind'.
-
- Under normal circumstances, this is the
- :class:`~sqlalchemy.engine.Connection` currently being used
- to emit SQL to the database.
-
- In a SQL script (offline) context, this value is ``None``, as no
- live database connection is present.
-
- """
-
-def get_context() -> MigrationContext:
- """Return the :class:`.MigrationContext` object that's
- currently in use.
-
- """
-
-def implementation_for(op_cls: Any) -> Callable[[_C], _C]:
- """Register an implementation for a given :class:`.MigrateOperation`.
-
- This is part of the operation extensibility API.
-
- .. seealso::
-
- :ref:`operation_plugins` - example of use
-
- """
-
-def inline_literal(
- value: Union[str, int], type_: Optional[TypeEngine[Any]] = None
-) -> _literal_bindparam:
- r"""Produce an 'inline literal' expression, suitable for
- using in an INSERT, UPDATE, or DELETE statement.
-
- When using Alembic in "offline" mode, CRUD operations
- aren't compatible with SQLAlchemy's default behavior surrounding
- literal values,
- which is that they are converted into bound values and passed
- separately into the ``execute()`` method of the DBAPI cursor.
- An offline SQL
- script needs to have these rendered inline. While it should
- always be noted that inline literal values are an **enormous**
- security hole in an application that handles untrusted input,
- a schema migration is not run in this context, so
- literals are safe to render inline, with the caveat that
- advanced types like dates may not be supported directly
- by SQLAlchemy.
-
- See :meth:`.Operations.execute` for an example usage of
- :meth:`.Operations.inline_literal`.
-
- The environment can also be configured to attempt to render
- "literal" values inline automatically, for those simple types
- that are supported by the dialect; see
- :paramref:`.EnvironmentContext.configure.literal_binds` for this
- more recently added feature.
-
- :param value: The value to render. Strings, integers, and simple
- numerics should be supported. Other types like boolean,
- dates, etc. may or may not be supported yet by various
- backends.
- :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine`
- subclass stating the type of this value. In SQLAlchemy
- expressions, this is usually derived automatically
- from the Python type of the value itself, as well as
- based on the context in which the value is used.
-
- .. seealso::
-
- :paramref:`.EnvironmentContext.configure.literal_binds`
-
- """
-
-@overload
-def invoke(operation: CreateTableOp) -> Table: ...
-@overload
-def invoke(
- operation: Union[
- AddConstraintOp,
- DropConstraintOp,
- CreateIndexOp,
- DropIndexOp,
- AddColumnOp,
- AlterColumnOp,
- AlterTableOp,
- CreateTableCommentOp,
- DropTableCommentOp,
- DropColumnOp,
- BulkInsertOp,
- DropTableOp,
- ExecuteSQLOp,
- ]
-) -> None: ...
-@overload
-def invoke(operation: MigrateOperation) -> Any:
- """Given a :class:`.MigrateOperation`, invoke it in terms of
- this :class:`.Operations` instance.
-
- """
-
-def register_operation(
- name: str, sourcename: Optional[str] = None
-) -> Callable[[Type[_T]], Type[_T]]:
- """Register a new operation for this class.
-
- This method is normally used to add new operations
- to the :class:`.Operations` class, and possibly the
- :class:`.BatchOperations` class as well. All Alembic migration
- operations are implemented via this system, however the system
- is also available as a public API to facilitate adding custom
- operations.
-
- .. seealso::
-
- :ref:`operation_plugins`
-
-
- """
-
-def rename_table(
- old_table_name: str, new_table_name: str, *, schema: Optional[str] = None
-) -> None:
- """Emit an ALTER TABLE to rename a table.
-
- :param old_table_name: old name.
- :param new_table_name: new name.
- :param schema: Optional schema name to operate within. To control
- quoting of the schema outside of the default behavior, use
- the SQLAlchemy construct
- :class:`~sqlalchemy.sql.elements.quoted_name`.
-
- """
-
-def run_async(
- async_function: Callable[..., Awaitable[_T]], *args: Any, **kw_args: Any
-) -> _T:
- """Invoke the given asynchronous callable, passing an asynchronous
- :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first
- argument.
-
- This method allows calling async functions from within the
- synchronous ``upgrade()`` or ``downgrade()`` alembic migration
- method.
-
- The async connection passed to the callable shares the same
- transaction as the connection running in the migration context.
-
- Any additional ``args`` or ``kw_args`` passed to this function are
- passed to the provided async function.
-
- .. versionadded:: 1.11
-
- .. note::
-
- This method can be called only when alembic is called using
- an async dialect.
- """
diff --git a/myenv/Lib/site-packages/alembic/operations/__init__.py b/myenv/Lib/site-packages/alembic/operations/__init__.py
deleted file mode 100644
index 26197cb..0000000
--- a/myenv/Lib/site-packages/alembic/operations/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from .
import toimpl -from .base import AbstractOperations -from .base import BatchOperations -from .base import Operations -from .ops import MigrateOperation -from .ops import MigrationScript - - -__all__ = [ - "AbstractOperations", - "Operations", - "BatchOperations", - "MigrateOperation", - "MigrationScript", -] diff --git a/myenv/Lib/site-packages/alembic/operations/base.py b/myenv/Lib/site-packages/alembic/operations/base.py deleted file mode 100644 index 9b52fa6..0000000 --- a/myenv/Lib/site-packages/alembic/operations/base.py +++ /dev/null @@ -1,1907 +0,0 @@ -# mypy: allow-untyped-calls - -from __future__ import annotations - -from contextlib import contextmanager -import re -import textwrap -from typing import Any -from typing import Awaitable -from typing import Callable -from typing import Dict -from typing import Iterator -from typing import List # noqa -from typing import Mapping -from typing import NoReturn -from typing import Optional -from typing import overload -from typing import Sequence # noqa -from typing import Tuple -from typing import Type # noqa -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -from sqlalchemy.sql.elements import conv - -from . import batch -from . import schemaobj -from .. import util -from ..util import sqla_compat -from ..util.compat import formatannotation_fwdref -from ..util.compat import inspect_formatargspec -from ..util.compat import inspect_getfullargspec -from ..util.sqla_compat import _literal_bindparam - - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy import Table - from sqlalchemy.engine import Connection - from sqlalchemy.sql import Executable - from sqlalchemy.sql.expression import ColumnElement - from sqlalchemy.sql.expression import TableClause - from sqlalchemy.sql.expression import TextClause - from sqlalchemy.sql.functions import Function - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Computed - from sqlalchemy.sql.schema import Identity - from sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.types import TypeEngine - - from .batch import BatchOperationsImpl - from .ops import AddColumnOp - from .ops import AddConstraintOp - from .ops import AlterColumnOp - from .ops import AlterTableOp - from .ops import BulkInsertOp - from .ops import CreateIndexOp - from .ops import CreateTableCommentOp - from .ops import CreateTableOp - from .ops import DropColumnOp - from .ops import DropConstraintOp - from .ops import DropIndexOp - from .ops import DropTableCommentOp - from .ops import DropTableOp - from .ops import ExecuteSQLOp - from .ops import MigrateOperation - from ..ddl import DefaultImpl - from ..runtime.migration import MigrationContext -__all__ = ("Operations", "BatchOperations") -_T = TypeVar("_T") - -_C = TypeVar("_C", bound=Callable[..., Any]) - - -class AbstractOperations(util.ModuleClsProxy): - """Base class for Operations and BatchOperations. - - .. versionadded:: 1.11.0 - - """ - - impl: Union[DefaultImpl, BatchOperationsImpl] - _to_impl = util.Dispatcher() - - def __init__( - self, - migration_context: MigrationContext, - impl: Optional[BatchOperationsImpl] = None, - ) -> None: - """Construct a new :class:`.Operations` - - :param migration_context: a :class:`.MigrationContext` - instance. 
- - """ - self.migration_context = migration_context - if impl is None: - self.impl = migration_context.impl - else: - self.impl = impl - - self.schema_obj = schemaobj.SchemaObjects(migration_context) - - @classmethod - def register_operation( - cls, name: str, sourcename: Optional[str] = None - ) -> Callable[[Type[_T]], Type[_T]]: - """Register a new operation for this class. - - This method is normally used to add new operations - to the :class:`.Operations` class, and possibly the - :class:`.BatchOperations` class as well. All Alembic migration - operations are implemented via this system, however the system - is also available as a public API to facilitate adding custom - operations. - - .. seealso:: - - :ref:`operation_plugins` - - - """ - - def register(op_cls: Type[_T]) -> Type[_T]: - if sourcename is None: - fn = getattr(op_cls, name) - source_name = fn.__name__ - else: - fn = getattr(op_cls, sourcename) - source_name = fn.__name__ - - spec = inspect_getfullargspec(fn) - - name_args = spec[0] - assert name_args[0:2] == ["cls", "operations"] - - name_args[0:2] = ["self"] - - args = inspect_formatargspec( - *spec, formatannotation=formatannotation_fwdref - ) - num_defaults = len(spec[3]) if spec[3] else 0 - - defaulted_vals: Tuple[Any, ...] - - if num_defaults: - defaulted_vals = tuple(name_args[0 - num_defaults :]) - else: - defaulted_vals = () - - defaulted_vals += tuple(spec[4]) - # here, we are using formatargspec in a different way in order - # to get a string that will re-apply incoming arguments to a new - # function call - - apply_kw = inspect_formatargspec( - name_args + spec[4], - spec[1], - spec[2], - defaulted_vals, - formatvalue=lambda x: "=" + x, - formatannotation=formatannotation_fwdref, - ) - - args = re.sub( - r'[_]?ForwardRef\(([\'"].+?[\'"])\)', - lambda m: m.group(1), - args, - ) - - func_text = textwrap.dedent( - """\ - def %(name)s%(args)s: - %(doc)r - return op_cls.%(source_name)s%(apply_kw)s - """ - % { - "name": name, - "source_name": source_name, - "args": args, - "apply_kw": apply_kw, - "doc": fn.__doc__, - } - ) - - globals_ = dict(globals()) - globals_.update({"op_cls": op_cls}) - lcl: Dict[str, Any] = {} - - exec(func_text, globals_, lcl) - setattr(cls, name, lcl[name]) - fn.__func__.__doc__ = ( - "This method is proxied on " - "the :class:`.%s` class, via the :meth:`.%s.%s` method." - % (cls.__name__, cls.__name__, name) - ) - if hasattr(fn, "_legacy_translations"): - lcl[name]._legacy_translations = fn._legacy_translations - return op_cls - - return register - - @classmethod - def implementation_for(cls, op_cls: Any) -> Callable[[_C], _C]: - """Register an implementation for a given :class:`.MigrateOperation`. - - This is part of the operation extensibility API. - - .. seealso:: - - :ref:`operation_plugins` - example of use - - """ - - def decorate(fn: _C) -> _C: - cls._to_impl.dispatch_for(op_cls)(fn) - return fn - - return decorate - - @classmethod - @contextmanager - def context( - cls, migration_context: MigrationContext - ) -> Iterator[Operations]: - op = Operations(migration_context) - op._install_proxy() - yield op - op._remove_proxy() - - @contextmanager - def batch_alter_table( - self, - table_name: str, - schema: Optional[str] = None, - recreate: Literal["auto", "always", "never"] = "auto", - partial_reordering: Optional[Tuple[Any, ...]] = None, - copy_from: Optional[Table] = None, - table_args: Tuple[Any, ...] = (), - table_kwargs: Mapping[str, Any] = util.immutabledict(), - reflect_args: Tuple[Any, ...] 
= (), - reflect_kwargs: Mapping[str, Any] = util.immutabledict(), - naming_convention: Optional[Dict[str, str]] = None, - ) -> Iterator[BatchOperations]: - """Invoke a series of per-table migrations in batch. - - Batch mode allows a series of operations specific to a table - to be syntactically grouped together, and allows for alternate - modes of table migration, in particular the "recreate" style of - migration required by SQLite. - - "recreate" style is as follows: - - 1. A new table is created with the new specification, based on the - migration directives within the batch, using a temporary name. - - 2. the data copied from the existing table to the new table. - - 3. the existing table is dropped. - - 4. the new table is renamed to the existing table name. - - The directive by default will only use "recreate" style on the - SQLite backend, and only if directives are present which require - this form, e.g. anything other than ``add_column()``. The batch - operation on other backends will proceed using standard ALTER TABLE - operations. - - The method is used as a context manager, which returns an instance - of :class:`.BatchOperations`; this object is the same as - :class:`.Operations` except that table names and schema names - are omitted. E.g.:: - - with op.batch_alter_table("some_table") as batch_op: - batch_op.add_column(Column("foo", Integer)) - batch_op.drop_column("bar") - - The operations within the context manager are invoked at once - when the context is ended. When run against SQLite, if the - migrations include operations not supported by SQLite's ALTER TABLE, - the entire table will be copied to a new one with the new - specification, moving all data across as well. - - The copy operation by default uses reflection to retrieve the current - structure of the table, and therefore :meth:`.batch_alter_table` - in this mode requires that the migration is run in "online" mode. - The ``copy_from`` parameter may be passed which refers to an existing - :class:`.Table` object, which will bypass this reflection step. - - .. note:: The table copy operation will currently not copy - CHECK constraints, and may not copy UNIQUE constraints that are - unnamed, as is possible on SQLite. See the section - :ref:`sqlite_batch_constraints` for workarounds. - - :param table_name: name of table - :param schema: optional schema name. - :param recreate: under what circumstances the table should be - recreated. At its default of ``"auto"``, the SQLite dialect will - recreate the table if any operations other than ``add_column()``, - ``create_index()``, or ``drop_index()`` are - present. Other options include ``"always"`` and ``"never"``. - :param copy_from: optional :class:`~sqlalchemy.schema.Table` object - that will act as the structure of the table being copied. If omitted, - table reflection is used to retrieve the structure of the table. - - .. seealso:: - - :ref:`batch_offline_mode` - - :paramref:`~.Operations.batch_alter_table.reflect_args` - - :paramref:`~.Operations.batch_alter_table.reflect_kwargs` - - :param reflect_args: a sequence of additional positional arguments that - will be applied to the table structure being reflected / copied; - this may be used to pass column and constraint overrides to the - table that will be reflected, in lieu of passing the whole - :class:`~sqlalchemy.schema.Table` using - :paramref:`~.Operations.batch_alter_table.copy_from`. 
- :param reflect_kwargs: a dictionary of additional keyword arguments - that will be applied to the table structure being copied; this may be - used to pass additional table and reflection options to the table that - will be reflected, in lieu of passing the whole - :class:`~sqlalchemy.schema.Table` using - :paramref:`~.Operations.batch_alter_table.copy_from`. - :param table_args: a sequence of additional positional arguments that - will be applied to the new :class:`~sqlalchemy.schema.Table` when - created, in addition to those copied from the source table. - This may be used to provide additional constraints such as CHECK - constraints that may not be reflected. - :param table_kwargs: a dictionary of additional keyword arguments - that will be applied to the new :class:`~sqlalchemy.schema.Table` - when created, in addition to those copied from the source table. - This may be used to provide for additional table options that may - not be reflected. - :param naming_convention: a naming convention dictionary of the form - described at :ref:`autogen_naming_conventions` which will be applied - to the :class:`~sqlalchemy.schema.MetaData` during the reflection - process. This is typically required if one wants to drop SQLite - constraints, as these constraints will not have names when - reflected on this backend. Requires SQLAlchemy **0.9.4** or greater. - - .. seealso:: - - :ref:`dropping_sqlite_foreign_keys` - - :param partial_reordering: a list of tuples, each suggesting a desired - ordering of two or more columns in the newly created table. Requires - that :paramref:`.batch_alter_table.recreate` is set to ``"always"``. - Examples, given a table with columns "a", "b", "c", and "d": - - Specify the order of all columns:: - - with op.batch_alter_table( - "some_table", - recreate="always", - partial_reordering=[("c", "d", "a", "b")], - ) as batch_op: - pass - - Ensure "d" appears before "c", and "b", appears before "a":: - - with op.batch_alter_table( - "some_table", - recreate="always", - partial_reordering=[("d", "c"), ("b", "a")], - ) as batch_op: - pass - - The ordering of columns not included in the partial_reordering - set is undefined. Therefore it is best to specify the complete - ordering of all columns for best results. - - .. note:: batch mode requires SQLAlchemy 0.8 or above. - - .. seealso:: - - :ref:`batch_migrations` - - """ - impl = batch.BatchOperationsImpl( - self, - table_name, - schema, - recreate, - copy_from, - table_args, - table_kwargs, - reflect_args, - reflect_kwargs, - naming_convention, - partial_reordering, - ) - batch_op = BatchOperations(self.migration_context, impl=impl) - yield batch_op - impl.flush() - - def get_context(self) -> MigrationContext: - """Return the :class:`.MigrationContext` object that's - currently in use. - - """ - - return self.migration_context - - @overload - def invoke(self, operation: CreateTableOp) -> Table: ... - - @overload - def invoke( - self, - operation: Union[ - AddConstraintOp, - DropConstraintOp, - CreateIndexOp, - DropIndexOp, - AddColumnOp, - AlterColumnOp, - AlterTableOp, - CreateTableCommentOp, - DropTableCommentOp, - DropColumnOp, - BulkInsertOp, - DropTableOp, - ExecuteSQLOp, - ], - ) -> None: ... - - @overload - def invoke(self, operation: MigrateOperation) -> Any: ... - - def invoke(self, operation: MigrateOperation) -> Any: - """Given a :class:`.MigrateOperation`, invoke it in terms of - this :class:`.Operations` instance. 
-
- """
- fn = self._to_impl.dispatch(
- operation, self.migration_context.impl.__dialect__
- )
- return fn(self, operation)
-
- def f(self, name: str) -> conv:
- """Indicate a string name that has already had a naming convention
- applied to it.
-
- This feature combines with the SQLAlchemy ``naming_convention`` feature
- to disambiguate constraint names that have already had naming
- conventions applied to them, versus those that have not. This is
- necessary in the case that the ``"%(constraint_name)s"`` token
- is used within a naming convention, so that it can be identified
- that this particular name should remain fixed.
-
- If the :meth:`.Operations.f` is used on a constraint, the naming
- convention will not take effect::
-
- op.add_column("t", Column("x", Boolean(name=op.f("ck_bool_t_x"))))
-
- Above, the CHECK constraint generated will have the name
- ``ck_bool_t_x`` regardless of whether or not a naming convention is
- in use.
-
- Alternatively, if a naming convention is in use, and 'f' is not used,
- names will be converted along conventions. If the ``target_metadata``
- contains the naming convention
- ``{"ck": "ck_bool_%(table_name)s_%(constraint_name)s"}``, then the
- output of the following::
-
- op.add_column("t", Column("x", Boolean(name="x")))
-
- will be::
-
- CONSTRAINT ck_bool_t_x CHECK (x in (1, 0))
-
- The function is rendered in the output of autogenerate when
- a particular constraint name is already converted.
-
- """
- return conv(name)
-
- def inline_literal(
- self, value: Union[str, int], type_: Optional[TypeEngine[Any]] = None
- ) -> _literal_bindparam:
- r"""Produce an 'inline literal' expression, suitable for
- using in an INSERT, UPDATE, or DELETE statement.
-
- When using Alembic in "offline" mode, CRUD operations
- aren't compatible with SQLAlchemy's default behavior surrounding
- literal values,
- which is that they are converted into bound values and passed
- separately into the ``execute()`` method of the DBAPI cursor.
- An offline SQL
- script needs to have these rendered inline. While it should
- always be noted that inline literal values are an **enormous**
- security hole in an application that handles untrusted input,
- a schema migration is not run in this context, so
- literals are safe to render inline, with the caveat that
- advanced types like dates may not be supported directly
- by SQLAlchemy.
-
- See :meth:`.Operations.execute` for an example usage of
- :meth:`.Operations.inline_literal`.
-
- The environment can also be configured to attempt to render
- "literal" values inline automatically, for those simple types
- that are supported by the dialect; see
- :paramref:`.EnvironmentContext.configure.literal_binds` for this
- more recently added feature.
-
- :param value: The value to render. Strings, integers, and simple
- numerics should be supported. Other types like boolean,
- dates, etc. may or may not be supported yet by various
- backends.
- :param type\_: optional - a :class:`sqlalchemy.types.TypeEngine`
- subclass stating the type of this value. In SQLAlchemy
- expressions, this is usually derived automatically
- from the Python type of the value itself, as well as
- based on the context in which the value is used.
-
- .. seealso::
-
- :paramref:`.EnvironmentContext.configure.literal_binds`
-
- """
- return sqla_compat._literal_bindparam(None, value, type_=type_)
-
- def get_bind(self) -> Connection:
- """Return the current 'bind'.
-
- Under normal circumstances, this is the
- :class:`~sqlalchemy.engine.Connection` currently being used
- to emit SQL to the database.
-
- In a SQL script (offline) context, this value is ``None``, as no
- live database connection is present.
-
- """
- return self.migration_context.impl.bind # type: ignore[return-value]
-
- def run_async(
- self,
- async_function: Callable[..., Awaitable[_T]],
- *args: Any,
- **kw_args: Any,
- ) -> _T:
- """Invoke the given asynchronous callable, passing an asynchronous
- :class:`~sqlalchemy.ext.asyncio.AsyncConnection` as the first
- argument.
-
- This method allows calling async functions from within the
- synchronous ``upgrade()`` or ``downgrade()`` alembic migration
- method.
-
- The async connection passed to the callable shares the same
- transaction as the connection running in the migration context.
-
- Any additional ``args`` or ``kw_args`` passed to this function are
- passed to the provided async function.
-
- .. versionadded:: 1.11
-
- .. note::
-
- This method can be called only when alembic is called using
- an async dialect.
- """
- if not sqla_compat.sqla_14_18:
- raise NotImplementedError("SQLAlchemy 1.4.18+ required")
- sync_conn = self.get_bind()
- if sync_conn is None:
- raise NotImplementedError("Cannot call run_async in SQL mode")
- if not sync_conn.dialect.is_async:
- raise ValueError("Cannot call run_async with a sync engine")
- from sqlalchemy.ext.asyncio import AsyncConnection
- from sqlalchemy.util import await_only
-
- async_conn = AsyncConnection._retrieve_proxy_for_target(sync_conn)
- return await_only(async_function(async_conn, *args, **kw_args))
-
-
-class Operations(AbstractOperations):
- """Define high level migration operations.
-
- Each operation corresponds to some schema migration operation,
- executed against a particular :class:`.MigrationContext`
- which in turn represents connectivity to a database,
- or a file output stream.
-
- While :class:`.Operations` is normally configured as
- part of the :meth:`.EnvironmentContext.run_migrations`
- method called from an ``env.py`` script, a standalone
- :class:`.Operations` instance can be
- made for use cases external to regular Alembic
- migrations by passing in a :class:`.MigrationContext`::
-
- from alembic.migration import MigrationContext
- from alembic.operations import Operations
-
- conn = myengine.connect()
- ctx = MigrationContext.configure(conn)
- op = Operations(ctx)
-
- op.alter_column("t", "c", nullable=True)
-
- Note that as of 0.8, most of the methods on this class are produced
- dynamically using the :meth:`.Operations.register_operation`
- method.
-
- """
-
- if TYPE_CHECKING:
- # START STUB FUNCTIONS: op_cls
- # ### the following stubs are generated by tools/write_pyi.py ###
- # ### do not edit ###
-
- def add_column(
- self,
- table_name: str,
- column: Column[Any],
- *,
- schema: Optional[str] = None,
- ) -> None:
- """Issue an "add column" instruction using the current
- migration context.
-
- e.g.::
-
- from alembic import op
- from sqlalchemy import Column, String
-
- op.add_column("organization", Column("name", String()))
-
- The :meth:`.Operations.add_column` method typically corresponds
- to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope
- of this command, the column's name, datatype, nullability,
- and optional server-generated defaults may be indicated.
-
- .. 
note:: - - With the exception of NOT NULL constraints or single-column FOREIGN - KEY constraints, other kinds of constraints such as PRIMARY KEY, - UNIQUE or CHECK constraints **cannot** be generated using this - method; for these constraints, refer to operations such as - :meth:`.Operations.create_primary_key` and - :meth:`.Operations.create_check_constraint`. In particular, the - following :class:`~sqlalchemy.schema.Column` parameters are - **ignored**: - - * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases - typically do not support an ALTER operation that can add - individual columns one at a time to an existing primary key - constraint, therefore it's less ambiguous to use the - :meth:`.Operations.create_primary_key` method, which assumes no - existing primary key constraint is present. - * :paramref:`~sqlalchemy.schema.Column.unique` - use the - :meth:`.Operations.create_unique_constraint` method - * :paramref:`~sqlalchemy.schema.Column.index` - use the - :meth:`.Operations.create_index` method - - - The provided :class:`~sqlalchemy.schema.Column` object may include a - :class:`~sqlalchemy.schema.ForeignKey` constraint directive, - referencing a remote table name. For this specific type of constraint, - Alembic will automatically emit a second ALTER statement in order to - add the single-column FOREIGN KEY constraint separately:: - - from alembic import op - from sqlalchemy import Column, INTEGER, ForeignKey - - op.add_column( - "organization", - Column("account_id", INTEGER, ForeignKey("accounts.id")), - ) - - The column argument passed to :meth:`.Operations.add_column` is a - :class:`~sqlalchemy.schema.Column` construct, used in the same way it's - used in SQLAlchemy. In particular, values or functions to be indicated - as producing the column's default value on the database side are - specified using the ``server_default`` parameter, and not ``default`` - which only specifies Python-side defaults:: - - from alembic import op - from sqlalchemy import Column, TIMESTAMP, func - - # specify "DEFAULT NOW" along with the column add - op.add_column( - "account", - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - :param table_name: String name of the parent table. - :param column: a :class:`sqlalchemy.schema.Column` object - representing the new column. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ # noqa: E501 - ... - - def alter_column( - self, - table_name: str, - column_name: str, - *, - nullable: Optional[bool] = None, - comment: Union[str, Literal[False], None] = False, - server_default: Any = False, - new_column_name: Optional[str] = None, - type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, - existing_type: Union[ - TypeEngine[Any], Type[TypeEngine[Any]], None - ] = None, - existing_server_default: Union[ - str, bool, Identity, Computed, None - ] = False, - existing_nullable: Optional[bool] = None, - existing_comment: Optional[str] = None, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - r"""Issue an "alter column" instruction using the - current migration context. - - Generally, only that aspect of the column which - is being changed, i.e. name, type, nullability, - default, needs to be specified. 
Multiple changes - can also be specified at once and the backend should - "do the right thing", emitting each change either - separately or together as the backend allows. - - MySQL has special requirements here, since MySQL - cannot ALTER a column without a full specification. - When producing MySQL-compatible migration files, - it is recommended that the ``existing_type``, - ``existing_server_default``, and ``existing_nullable`` - parameters be present, if not being altered. - - Type changes which are against the SQLAlchemy - "schema" types :class:`~sqlalchemy.types.Boolean` - and :class:`~sqlalchemy.types.Enum` may also - add or drop constraints which accompany those - types on backends that don't support them natively. - The ``existing_type`` argument is - used in this case to identify and remove a previous - constraint that was bound to the type object. - - :param table_name: string name of the target table. - :param column_name: string name of the target column, - as it exists before the operation begins. - :param nullable: Optional; specify ``True`` or ``False`` - to alter the column's nullability. - :param server_default: Optional; specify a string - SQL expression, :func:`~sqlalchemy.sql.expression.text`, - or :class:`~sqlalchemy.schema.DefaultClause` to indicate - an alteration to the column's default value. - Set to ``None`` to have the default removed. - :param comment: optional string text of a new comment to add to the - column. - :param new_column_name: Optional; specify a string name here to - indicate the new name within a column rename operation. - :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` - type object to specify a change to the column's type. - For SQLAlchemy types that also indicate a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), - the constraint is also generated. - :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; - currently understood by the MySQL dialect. - :param existing_type: Optional; a - :class:`~sqlalchemy.types.TypeEngine` - type object to specify the previous type. This - is required for all MySQL column alter operations that - don't otherwise specify a new type, as well as for - when nullability is being changed on a SQL Server - column. It is also used if the type is a so-called - SQLAlchemy "schema" type which may define a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, - :class:`~sqlalchemy.types.Enum`), - so that the constraint can be dropped. - :param existing_server_default: Optional; The existing - default value of the column. Required on MySQL if - an existing default is not being changed; else MySQL - removes the default. - :param existing_nullable: Optional; the existing nullability - of the column. Required on MySQL if the existing nullability - is not being changed; else MySQL sets this to NULL. - :param existing_autoincrement: Optional; the existing autoincrement - of the column. Used for MySQL's system of altering a column - that specifies ``AUTO_INCREMENT``. - :param existing_comment: string text of the existing comment on the - column to be maintained. Required on MySQL if the existing comment - on the column is not being changed. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. 
- :param postgresql_using: String argument which will indicate a - SQL expression to render within the Postgresql-specific USING clause - within ALTER COLUMN. This string is taken directly as raw SQL which - must explicitly include any necessary quoting or escaping of tokens - within the expression. - - """ # noqa: E501 - ... - - def bulk_insert( - self, - table: Union[Table, TableClause], - rows: List[Dict[str, Any]], - *, - multiinsert: bool = True, - ) -> None: - """Issue a "bulk insert" operation using the current - migration context. - - This provides a means of representing an INSERT of multiple rows - which works equally well in the context of executing on a live - connection as well as that of generating a SQL script. In the - case of a SQL script, the values are rendered inline into the - statement. - - e.g.:: - - from alembic import op - from datetime import date - from sqlalchemy.sql import table, column - from sqlalchemy import String, Integer, Date - - # Create an ad-hoc table to use for the insert statement. - accounts_table = table( - "account", - column("id", Integer), - column("name", String), - column("create_date", Date), - ) - - op.bulk_insert( - accounts_table, - [ - { - "id": 1, - "name": "John Smith", - "create_date": date(2010, 10, 5), - }, - { - "id": 2, - "name": "Ed Williams", - "create_date": date(2007, 5, 27), - }, - { - "id": 3, - "name": "Wendy Jones", - "create_date": date(2008, 8, 15), - }, - ], - ) - - When using --sql mode, some datatypes may not render inline - automatically, such as dates and other special types. When this - issue is present, :meth:`.Operations.inline_literal` may be used:: - - op.bulk_insert( - accounts_table, - [ - { - "id": 1, - "name": "John Smith", - "create_date": op.inline_literal("2010-10-05"), - }, - { - "id": 2, - "name": "Ed Williams", - "create_date": op.inline_literal("2007-05-27"), - }, - { - "id": 3, - "name": "Wendy Jones", - "create_date": op.inline_literal("2008-08-15"), - }, - ], - multiinsert=False, - ) - - When using :meth:`.Operations.inline_literal` in conjunction with - :meth:`.Operations.bulk_insert`, in order for the statement to work - in "online" (e.g. non --sql) mode, the - :paramref:`~.Operations.bulk_insert.multiinsert` - flag should be set to ``False``, which will have the effect of - individual INSERT statements being emitted to the database, each - with a distinct VALUES clause, so that the "inline" values can - still be rendered, rather than attempting to pass the values - as bound parameters. - - :param table: a table object which represents the target of the INSERT. - - :param rows: a list of dictionaries indicating rows. - - :param multiinsert: when at its default of True and --sql mode is not - enabled, the INSERT statement will be executed using - "executemany()" style, where all elements in the list of - dictionaries are passed as bound parameters in a single - list. Setting this to False results in individual INSERT - statements being emitted per parameter set, and is needed - in those cases where non-literal values are present in the - parameter sets. - - """ # noqa: E501 - ... - - def create_check_constraint( - self, - constraint_name: Optional[str], - table_name: str, - condition: Union[str, ColumnElement[bool], TextClause], - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - """Issue a "create check constraint" instruction using the - current migration context. 
- - e.g.:: - - from alembic import op - from sqlalchemy.sql import column, func - - op.create_check_constraint( - "ck_user_name_len", - "user", - func.len(column("name")) > 5, - ) - - CHECK constraints are usually against a SQL expression, so ad-hoc - table metadata is usually needed. The function will convert the given - arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound - to an anonymous table in order to emit the CREATE statement. - - :param name: Name of the check constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the source table. - :param condition: SQL expression that's the condition of the - constraint. Can be a string or SQLAlchemy expression language - structure. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ # noqa: E501 - ... - - def create_exclude_constraint( - self, - constraint_name: str, - table_name: str, - *elements: Any, - **kw: Any, - ) -> Optional[Table]: - """Issue an alter to create an EXCLUDE constraint using the - current migration context. - - .. note:: This method is Postgresql specific, and additionally - requires at least SQLAlchemy 1.0. - - e.g.:: - - from alembic import op - - op.create_exclude_constraint( - "user_excl", - "user", - ("period", "&&"), - ("group", "="), - where=("group != 'some group'"), - ) - - Note that the expressions work the same way as that of - the ``ExcludeConstraint`` object itself; if plain strings are - passed, quoting rules must be applied manually. - - :param name: Name of the constraint. - :param table_name: String name of the source table. - :param elements: exclude conditions. - :param where: SQL expression or SQL string with optional WHERE - clause. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. - - """ # noqa: E501 - ... - - def create_foreign_key( - self, - constraint_name: Optional[str], - source_table: str, - referent_table: str, - local_cols: List[str], - remote_cols: List[str], - *, - onupdate: Optional[str] = None, - ondelete: Optional[str] = None, - deferrable: Optional[bool] = None, - initially: Optional[str] = None, - match: Optional[str] = None, - source_schema: Optional[str] = None, - referent_schema: Optional[str] = None, - **dialect_kw: Any, - ) -> None: - """Issue a "create foreign key" instruction using the - current migration context. 
-
-            e.g.::
-
-                from alembic import op
-
-                op.create_foreign_key(
-                    "fk_user_address",
-                    "address",
-                    "user",
-                    ["user_id"],
-                    ["id"],
-                )
-
-            This internally generates a :class:`~sqlalchemy.schema.Table` object
-            containing the necessary columns, then generates a new
-            :class:`~sqlalchemy.schema.ForeignKeyConstraint`
-            object which it then associates with the
-            :class:`~sqlalchemy.schema.Table`.
-            Any event listeners associated with this action will be fired
-            off normally.  The :class:`~sqlalchemy.schema.AddConstraint`
-            construct is ultimately used to generate the ALTER statement.
-
-            :param constraint_name: Name of the foreign key constraint.  The name
-             is necessary so that an ALTER statement can be emitted.  For setups
-             that use an automated naming scheme such as that described at
-             :ref:`sqla:constraint_naming_conventions`,
-             ``name`` here can be ``None``, as the event listener will
-             apply the name to the constraint object when it is associated
-             with the table.
-            :param source_table: String name of the source table.
-            :param referent_table: String name of the destination table.
-            :param local_cols: a list of string column names in the
-             source table.
-            :param remote_cols: a list of string column names in the
-             remote table.
-            :param onupdate: Optional string. If set, emit ON UPDATE when
-             issuing DDL for this constraint. Typical values include CASCADE,
-             SET NULL and RESTRICT.
-            :param ondelete: Optional string. If set, emit ON DELETE when
-             issuing DDL for this constraint. Typical values include CASCADE,
-             SET NULL and RESTRICT.
-            :param deferrable: optional bool. If set, emit DEFERRABLE or NOT
-             DEFERRABLE when issuing DDL for this constraint.
-            :param source_schema: Optional schema name of the source table.
-            :param referent_schema: Optional schema name of the destination table.
-
-            """  # noqa: E501
-            ...
-
-        def create_index(
-            self,
-            index_name: Optional[str],
-            table_name: str,
-            columns: Sequence[Union[str, TextClause, Function[Any]]],
-            *,
-            schema: Optional[str] = None,
-            unique: bool = False,
-            if_not_exists: Optional[bool] = None,
-            **kw: Any,
-        ) -> None:
-            r"""Issue a "create index" instruction using the current
-            migration context.
-
-            e.g.::
-
-                from alembic import op
-
-                op.create_index("ik_test", "t1", ["foo", "bar"])
-
-            Functional indexes can be produced by using the
-            :func:`sqlalchemy.sql.expression.text` construct::
-
-                from alembic import op
-                from sqlalchemy import text
-
-                op.create_index("ik_test", "t1", [text("lower(foo)")])
-
-            :param index_name: name of the index.
-            :param table_name: name of the owning table.
-            :param columns: a list consisting of string column names and/or
-             :func:`~sqlalchemy.sql.expression.text` constructs.
-            :param schema: Optional schema name to operate within. To control
-             quoting of the schema outside of the default behavior, use
-             the SQLAlchemy construct
-             :class:`~sqlalchemy.sql.elements.quoted_name`.
-            :param unique: If True, create a unique index.
-
-            :param quote: Force quoting of this column's name on or off,
-             corresponding to ``True`` or ``False``. When left at its default
-             of ``None``, the column identifier will be quoted according to
-             whether the name is case sensitive (identifiers with at least one
-             upper case character are treated as case sensitive), or if it's a
-             reserved word. This flag is only needed to force quoting of a
-             reserved word which is not known by the SQLAlchemy dialect.
-
-            :param if_not_exists: If True, adds IF NOT EXISTS operator when
-             creating the new index.
-
-            .. versionadded:: 1.12.0
-
-            :param \**kw: Additional keyword arguments not mentioned above are
-             dialect specific, and passed in the form
-             ``<dialectname>_<argname>``.
-             See the documentation regarding an individual dialect at
-             :ref:`dialect_toplevel` for detail on documented arguments.
-
-            """  # noqa: E501
-            ...
-
-        def create_primary_key(
-            self,
-            constraint_name: Optional[str],
-            table_name: str,
-            columns: List[str],
-            *,
-            schema: Optional[str] = None,
-        ) -> None:
-            """Issue a "create primary key" instruction using the current
-            migration context.
-
-            e.g.::
-
-                from alembic import op
-
-                op.create_primary_key("pk_my_table", "my_table", ["id", "version"])
-
-            This internally generates a :class:`~sqlalchemy.schema.Table` object
-            containing the necessary columns, then generates a new
-            :class:`~sqlalchemy.schema.PrimaryKeyConstraint`
-            object which it then associates with the
-            :class:`~sqlalchemy.schema.Table`.
-            Any event listeners associated with this action will be fired
-            off normally.  The :class:`~sqlalchemy.schema.AddConstraint`
-            construct is ultimately used to generate the ALTER statement.
-
-            :param constraint_name: Name of the primary key constraint.  The name
-             is necessary so that an ALTER statement can be emitted.  For setups
-             that use an automated naming scheme such as that described at
-             :ref:`sqla:constraint_naming_conventions`,
-             ``name`` here can be ``None``, as the event listener will
-             apply the name to the constraint object when it is associated
-             with the table.
-            :param table_name: String name of the target table.
-            :param columns: a list of string column names to be applied to the
-             primary key constraint.
-            :param schema: Optional schema name to operate within. To control
-             quoting of the schema outside of the default behavior, use
-             the SQLAlchemy construct
-             :class:`~sqlalchemy.sql.elements.quoted_name`.
-
-            """  # noqa: E501
-            ...
-
-        def create_table(
-            self,
-            table_name: str,
-            *columns: SchemaItem,
-            if_not_exists: Optional[bool] = None,
-            **kw: Any,
-        ) -> Table:
-            r"""Issue a "create table" instruction using the current migration
-            context.
-
-            This directive receives an argument list similar to that of the
-            traditional :class:`sqlalchemy.schema.Table` construct, but without the
-            metadata::
-
-                from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, TIMESTAMP, Column, func
-                from alembic import op
-
-                op.create_table(
-                    "account",
-                    Column("id", INTEGER, primary_key=True),
-                    Column("name", VARCHAR(50), nullable=False),
-                    Column("description", NVARCHAR(200)),
-                    Column("timestamp", TIMESTAMP, server_default=func.now()),
-                )
-
-            Note that :meth:`.create_table` accepts
-            :class:`~sqlalchemy.schema.Column`
-            constructs directly from the SQLAlchemy library.
-            In particular, default values to be created on the database side are
-            specified using the ``server_default`` parameter, and not
-            ``default`` which only specifies Python-side defaults::
-
-                from alembic import op
-                from sqlalchemy import Column, INTEGER, TIMESTAMP, func
-
-                # specify "DEFAULT NOW" along with the "timestamp" column
-                op.create_table(
-                    "account",
-                    Column("id", INTEGER, primary_key=True),
-                    Column("timestamp", TIMESTAMP, server_default=func.now()),
-                )
-
-            The function also returns a newly created
-            :class:`~sqlalchemy.schema.Table` object, corresponding to the table
-            specification given, which is suitable for
-            immediate SQL operations, in particular
-            :meth:`.Operations.bulk_insert`::
-
-                from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, TIMESTAMP, Column, func
-                from alembic import op
-
-                account_table = op.create_table(
-                    "account",
-                    Column("id", INTEGER, primary_key=True),
-                    Column("name", VARCHAR(50), nullable=False),
-                    Column("description", NVARCHAR(200)),
-                    Column("timestamp", TIMESTAMP, server_default=func.now()),
-                )
-
-                op.bulk_insert(
-                    account_table,
-                    [
-                        {"name": "A1", "description": "account 1"},
-                        {"name": "A2", "description": "account 2"},
-                    ],
-                )
-
-            :param table_name: Name of the table
-            :param \*columns: collection of :class:`~sqlalchemy.schema.Column`
-             objects within
-             the table, as well as optional :class:`~sqlalchemy.schema.Constraint`
-             objects
-             and :class:`~sqlalchemy.schema.Index` objects.
-            :param schema: Optional schema name to operate within. To control
-             quoting of the schema outside of the default behavior, use
-             the SQLAlchemy construct
-             :class:`~sqlalchemy.sql.elements.quoted_name`.
-            :param if_not_exists: If True, adds IF NOT EXISTS operator when
-             creating the new table.
-
-            .. versionadded:: 1.13.3
-
-            :param \**kw: Other keyword arguments are passed to the underlying
-             :class:`sqlalchemy.schema.Table` object created for the command.
-
-            :return: the :class:`~sqlalchemy.schema.Table` object corresponding
-             to the parameters given.
-
-            """  # noqa: E501
-            ...
-
-        def create_table_comment(
-            self,
-            table_name: str,
-            comment: Optional[str],
-            *,
-            existing_comment: Optional[str] = None,
-            schema: Optional[str] = None,
-        ) -> None:
-            """Emit a COMMENT ON operation to set the comment for a table.
-
-            :param table_name: string name of the target table.
-            :param comment: string value of the comment being registered against
-             the specified table.
-            :param existing_comment: String value of a comment
-             already registered on the specified table, used within autogenerate
-             so that the operation is reversible, but not required for direct
-             use.
-
-            .. seealso::
-
-                :meth:`.Operations.drop_table_comment`
-
-                :paramref:`.Operations.alter_column.comment`
-
-            """  # noqa: E501
-            ...
-
-        def create_unique_constraint(
-            self,
-            constraint_name: Optional[str],
-            table_name: str,
-            columns: Sequence[str],
-            *,
-            schema: Optional[str] = None,
-            **kw: Any,
-        ) -> Any:
-            """Issue a "create unique constraint" instruction using the
-            current migration context.
-
-            e.g.::
-
-                from alembic import op
-                op.create_unique_constraint("uq_user_name", "user", ["name"])
-
-            This internally generates a :class:`~sqlalchemy.schema.Table` object
-            containing the necessary columns, then generates a new
-            :class:`~sqlalchemy.schema.UniqueConstraint`
-            object which it then associates with the
-            :class:`~sqlalchemy.schema.Table`.
-            Any event listeners associated with this action will be fired
-            off normally. 
The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param name: Name of the unique constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the source table. - :param columns: a list of string column names in the - source table. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ # noqa: E501 - ... - - def drop_column( - self, - table_name: str, - column_name: str, - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - """Issue a "drop column" instruction using the current - migration context. - - e.g.:: - - drop_column("organization", "account_id") - - :param table_name: name of table - :param column_name: name of column - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param mssql_drop_check: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the CHECK constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.check_constraints, - then exec's a separate DROP CONSTRAINT for that constraint. - :param mssql_drop_default: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the DEFAULT constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.default_constraints, - then exec's a separate DROP CONSTRAINT for that default. - :param mssql_drop_foreign_key: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop a single FOREIGN KEY constraint on the column using a - SQL-script-compatible - block that selects into a @variable from - sys.foreign_keys/sys.foreign_key_columns, - then exec's a separate DROP CONSTRAINT for that default. Only - works if the column has exactly one FK constraint which refers to - it, at the moment. - - """ # noqa: E501 - ... - - def drop_constraint( - self, - constraint_name: str, - table_name: str, - type_: Optional[str] = None, - *, - schema: Optional[str] = None, - ) -> None: - r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. - - :param constraint_name: name of the constraint. - :param table_name: table name. - :param type\_: optional, required on MySQL. can be - 'foreignkey', 'primary', 'unique', or 'check'. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ # noqa: E501 - ... 
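To make the constraint stubs above concrete, here is a minimal sketch of how a revision script might pair ``create_unique_constraint`` with ``drop_constraint``; the table, column, and constraint names are hypothetical, and ``type_`` is included because, per the docstring above, MySQL requires it when dropping::

    from alembic import op

    def upgrade():
        # a named constraint can later be dropped by that same name
        op.create_unique_constraint("uq_user_email", "user", ["email"])

    def downgrade():
        # type_ is optional on most backends but required on MySQL
        op.drop_constraint("uq_user_email", "user", type_="unique")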
-
-        def drop_index(
-            self,
-            index_name: str,
-            table_name: Optional[str] = None,
-            *,
-            schema: Optional[str] = None,
-            if_exists: Optional[bool] = None,
-            **kw: Any,
-        ) -> None:
-            r"""Issue a "drop index" instruction using the current
-            migration context.
-
-            e.g.::
-
-                drop_index("accounts")
-
-            :param index_name: name of the index.
-            :param table_name: name of the owning table.  Some
-             backends such as Microsoft SQL Server require this.
-            :param schema: Optional schema name to operate within. To control
-             quoting of the schema outside of the default behavior, use
-             the SQLAlchemy construct
-             :class:`~sqlalchemy.sql.elements.quoted_name`.
-
-            :param if_exists: If True, adds IF EXISTS operator when
-             dropping the index.
-
-            .. versionadded:: 1.12.0
-
-            :param \**kw: Additional keyword arguments not mentioned above are
-             dialect specific, and passed in the form
-             ``<dialectname>_<argname>``.
-             See the documentation regarding an individual dialect at
-             :ref:`dialect_toplevel` for detail on documented arguments.
-
-            """  # noqa: E501
-            ...
-
-        def drop_table(
-            self,
-            table_name: str,
-            *,
-            schema: Optional[str] = None,
-            if_exists: Optional[bool] = None,
-            **kw: Any,
-        ) -> None:
-            r"""Issue a "drop table" instruction using the current
-            migration context.
-
-            e.g.::
-
-                drop_table("accounts")
-
-            :param table_name: Name of the table
-            :param schema: Optional schema name to operate within. To control
-             quoting of the schema outside of the default behavior, use
-             the SQLAlchemy construct
-             :class:`~sqlalchemy.sql.elements.quoted_name`.
-            :param if_exists: If True, adds IF EXISTS operator when
-             dropping the table.
-
-            .. versionadded:: 1.13.3
-
-            :param \**kw: Other keyword arguments are passed to the underlying
-             :class:`sqlalchemy.schema.Table` object created for the command.
-
-            """  # noqa: E501
-            ...
-
-        def drop_table_comment(
-            self,
-            table_name: str,
-            *,
-            existing_comment: Optional[str] = None,
-            schema: Optional[str] = None,
-        ) -> None:
-            """Issue a "drop table comment" operation to
-            remove an existing comment set on a table.
-
-            :param table_name: string name of the target table.
-            :param existing_comment: An optional string value of a comment already
-             registered on the specified table.
-
-            .. seealso::
-
-                :meth:`.Operations.create_table_comment`
-
-                :paramref:`.Operations.alter_column.comment`
-
-            """  # noqa: E501
-            ...
-
-        def execute(
-            self,
-            sqltext: Union[Executable, str],
-            *,
-            execution_options: Optional[dict[str, Any]] = None,
-        ) -> None:
-            r"""Execute the given SQL using the current migration context.
-
-            The given SQL can be a plain string, e.g.::
-
-                op.execute("INSERT INTO table (foo) VALUES ('some value')")
-
-            Or it can be any kind of Core SQL Expression construct, such as
-            below where we use an update construct::
-
-                from sqlalchemy.sql import table, column
-                from sqlalchemy import String
-                from alembic import op
-
-                account = table("account", column("name", String))
-                op.execute(
-                    account.update()
-                    .where(account.c.name == op.inline_literal("account 1"))
-                    .values({"name": op.inline_literal("account 2")})
-                )
-
-            Above, we made use of the SQLAlchemy
-            :func:`sqlalchemy.sql.expression.table` and
-            :func:`sqlalchemy.sql.expression.column` constructs to make a brief,
-            ad-hoc table construct just for our UPDATE statement. 
A full - :class:`~sqlalchemy.schema.Table` construct of course works perfectly - fine as well, though note it's a recommended practice to at least - ensure the definition of a table is self-contained within the migration - script, rather than imported from a module that may break compatibility - with older migrations. - - In a SQL script context, the statement is emitted directly to the - output stream. There is *no* return result, however, as this - function is oriented towards generating a change script - that can run in "offline" mode. Additionally, parameterized - statements are discouraged here, as they *will not work* in offline - mode. Above, we use :meth:`.inline_literal` where parameters are - to be used. - - For full interaction with a connected database where parameters can - also be used normally, use the "bind" available from the context:: - - from alembic import op - - connection = op.get_bind() - - connection.execute( - account.update() - .where(account.c.name == "account 1") - .values({"name": "account 2"}) - ) - - Additionally, when passing the statement as a plain string, it is first - coerced into a :func:`sqlalchemy.sql.expression.text` construct - before being passed along. In the less likely case that the - literal SQL string contains a colon, it must be escaped with a - backslash, as:: - - op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')") - - - :param sqltext: Any legal SQLAlchemy expression, including: - - * a string - * a :func:`sqlalchemy.sql.expression.text` construct. - * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update` construct. - * a :func:`sqlalchemy.sql.expression.delete` construct. - * Any "executable" described in SQLAlchemy Core documentation, - noting that no result set is returned. - - .. note:: when passing a plain string, the statement is coerced into - a :func:`sqlalchemy.sql.expression.text` construct. This construct - considers symbols with colons, e.g. ``:foo`` to be bound parameters. - To avoid this, ensure that colon symbols are escaped, e.g. - ``\:foo``. - - :param execution_options: Optional dictionary of - execution options, will be passed to - :meth:`sqlalchemy.engine.Connection.execution_options`. - """ # noqa: E501 - ... - - def rename_table( - self, - old_table_name: str, - new_table_name: str, - *, - schema: Optional[str] = None, - ) -> None: - """Emit an ALTER TABLE to rename a table. - - :param old_table_name: old name. - :param new_table_name: new name. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ # noqa: E501 - ... - - # END STUB FUNCTIONS: op_cls - - -class BatchOperations(AbstractOperations): - """Modifies the interface :class:`.Operations` for batch mode. - - This basically omits the ``table_name`` and ``schema`` parameters - from associated methods, as these are a given when running under batch - mode. - - .. seealso:: - - :meth:`.Operations.batch_alter_table` - - Note that as of 0.8, most of the methods on this class are produced - dynamically using the :meth:`.Operations.register_operation` - method. - - """ - - impl: BatchOperationsImpl - - def _noop(self, operation: Any) -> NoReturn: - raise NotImplementedError( - "The %s method does not apply to a batch table alter operation." 
- % operation - ) - - if TYPE_CHECKING: - # START STUB FUNCTIONS: batch_op - # ### the following stubs are generated by tools/write_pyi.py ### - # ### do not edit ### - - def add_column( - self, - column: Column[Any], - *, - insert_before: Optional[str] = None, - insert_after: Optional[str] = None, - ) -> None: - """Issue an "add column" instruction using the current - batch migration context. - - .. seealso:: - - :meth:`.Operations.add_column` - - """ # noqa: E501 - ... - - def alter_column( - self, - column_name: str, - *, - nullable: Optional[bool] = None, - comment: Union[str, Literal[False], None] = False, - server_default: Any = False, - new_column_name: Optional[str] = None, - type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = None, - existing_type: Union[ - TypeEngine[Any], Type[TypeEngine[Any]], None - ] = None, - existing_server_default: Union[ - str, bool, Identity, Computed, None - ] = False, - existing_nullable: Optional[bool] = None, - existing_comment: Optional[str] = None, - insert_before: Optional[str] = None, - insert_after: Optional[str] = None, - **kw: Any, - ) -> None: - """Issue an "alter column" instruction using the current - batch migration context. - - Parameters are the same as that of :meth:`.Operations.alter_column`, - as well as the following option(s): - - :param insert_before: String name of an existing column which this - column should be placed before, when creating the new table. - - :param insert_after: String name of an existing column which this - column should be placed after, when creating the new table. If - both :paramref:`.BatchOperations.alter_column.insert_before` - and :paramref:`.BatchOperations.alter_column.insert_after` are - omitted, the column is inserted after the last existing column - in the table. - - .. seealso:: - - :meth:`.Operations.alter_column` - - - """ # noqa: E501 - ... - - def create_check_constraint( - self, - constraint_name: str, - condition: Union[str, ColumnElement[bool], TextClause], - **kw: Any, - ) -> None: - """Issue a "create check constraint" instruction using the - current batch migration context. - - The batch form of this call omits the ``source`` and ``schema`` - arguments from the call. - - .. seealso:: - - :meth:`.Operations.create_check_constraint` - - """ # noqa: E501 - ... - - def create_exclude_constraint( - self, constraint_name: str, *elements: Any, **kw: Any - ) -> Optional[Table]: - """Issue a "create exclude constraint" instruction using the - current batch migration context. - - .. note:: This method is Postgresql specific, and additionally - requires at least SQLAlchemy 1.0. - - .. seealso:: - - :meth:`.Operations.create_exclude_constraint` - - """ # noqa: E501 - ... - - def create_foreign_key( - self, - constraint_name: Optional[str], - referent_table: str, - local_cols: List[str], - remote_cols: List[str], - *, - referent_schema: Optional[str] = None, - onupdate: Optional[str] = None, - ondelete: Optional[str] = None, - deferrable: Optional[bool] = None, - initially: Optional[str] = None, - match: Optional[str] = None, - **dialect_kw: Any, - ) -> None: - """Issue a "create foreign key" instruction using the - current batch migration context. - - The batch form of this call omits the ``source`` and ``source_schema`` - arguments from the call. - - e.g.:: - - with batch_alter_table("address") as batch_op: - batch_op.create_foreign_key( - "fk_user_address", - "user", - ["user_id"], - ["id"], - ) - - .. seealso:: - - :meth:`.Operations.create_foreign_key` - - """ # noqa: E501 - ... 
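As a usage sketch for the batch stubs above: because SQLite supports only a limited form of ALTER TABLE, these methods are normally invoked through ``Operations.batch_alter_table``, which queues each call and replays it against a rebuilt copy of the table ("move and copy"). The table and column names below are invented for illustration::

    from alembic import op
    from sqlalchemy import Column, String

    def upgrade():
        with op.batch_alter_table("user") as batch_op:
            # each call is queued, then applied when the batch
            # context exits and the table is recreated
            batch_op.add_column(Column("nickname", String(50)))
            batch_op.alter_column(
                "email", existing_type=String(120), nullable=False
            )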
- - def create_index( - self, index_name: str, columns: List[str], **kw: Any - ) -> None: - """Issue a "create index" instruction using the - current batch migration context. - - .. seealso:: - - :meth:`.Operations.create_index` - - """ # noqa: E501 - ... - - def create_primary_key( - self, constraint_name: Optional[str], columns: List[str] - ) -> None: - """Issue a "create primary key" instruction using the - current batch migration context. - - The batch form of this call omits the ``table_name`` and ``schema`` - arguments from the call. - - .. seealso:: - - :meth:`.Operations.create_primary_key` - - """ # noqa: E501 - ... - - def create_table_comment( - self, - comment: Optional[str], - *, - existing_comment: Optional[str] = None, - ) -> None: - """Emit a COMMENT ON operation to set the comment for a table - using the current batch migration context. - - :param comment: string value of the comment being registered against - the specified table. - :param existing_comment: String value of a comment - already registered on the specified table, used within autogenerate - so that the operation is reversible, but not required for direct - use. - - """ # noqa: E501 - ... - - def create_unique_constraint( - self, constraint_name: str, columns: Sequence[str], **kw: Any - ) -> Any: - """Issue a "create unique constraint" instruction using the - current batch migration context. - - The batch form of this call omits the ``source`` and ``schema`` - arguments from the call. - - .. seealso:: - - :meth:`.Operations.create_unique_constraint` - - """ # noqa: E501 - ... - - def drop_column(self, column_name: str, **kw: Any) -> None: - """Issue a "drop column" instruction using the current - batch migration context. - - .. seealso:: - - :meth:`.Operations.drop_column` - - """ # noqa: E501 - ... - - def drop_constraint( - self, constraint_name: str, type_: Optional[str] = None - ) -> None: - """Issue a "drop constraint" instruction using the - current batch migration context. - - The batch form of this call omits the ``table_name`` and ``schema`` - arguments from the call. - - .. seealso:: - - :meth:`.Operations.drop_constraint` - - """ # noqa: E501 - ... - - def drop_index(self, index_name: str, **kw: Any) -> None: - """Issue a "drop index" instruction using the - current batch migration context. - - .. seealso:: - - :meth:`.Operations.drop_index` - - """ # noqa: E501 - ... - - def drop_table_comment( - self, *, existing_comment: Optional[str] = None - ) -> None: - """Issue a "drop table comment" operation to - remove an existing comment set on a table using the current - batch operations context. - - :param existing_comment: An optional string value of a comment already - registered on the specified table. - - """ # noqa: E501 - ... - - def execute( - self, - sqltext: Union[Executable, str], - *, - execution_options: Optional[dict[str, Any]] = None, - ) -> None: - """Execute the given SQL using the current migration context. - - .. seealso:: - - :meth:`.Operations.execute` - - """ # noqa: E501 - ... 
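One more sketch, relevant to ``drop_constraint`` in batch mode: when the original constraint was created without an explicit name (common on SQLite), ``batch_alter_table`` can be given a ``naming_convention`` (a parameter accepted by the batch implementation shown below) so that the reflected constraint receives a deterministic name that can then be dropped. All names here are illustrative::

    from alembic import op

    naming_convention = {
        "uq": "uq_%(table_name)s_%(column_0_name)s",
    }

    def upgrade():
        with op.batch_alter_table(
            "user", naming_convention=naming_convention
        ) as batch_op:
            # the convention names the reflected UNIQUE constraint
            # "uq_user_email", making it addressable by name
            batch_op.drop_constraint("uq_user_email", type_="unique")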
- - # END STUB FUNCTIONS: batch_op diff --git a/myenv/Lib/site-packages/alembic/operations/batch.py b/myenv/Lib/site-packages/alembic/operations/batch.py deleted file mode 100644 index fd7ab99..0000000 --- a/myenv/Lib/site-packages/alembic/operations/batch.py +++ /dev/null @@ -1,717 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Tuple -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import CheckConstraint -from sqlalchemy import Column -from sqlalchemy import ForeignKeyConstraint -from sqlalchemy import Index -from sqlalchemy import MetaData -from sqlalchemy import PrimaryKeyConstraint -from sqlalchemy import schema as sql_schema -from sqlalchemy import Table -from sqlalchemy import types as sqltypes -from sqlalchemy.sql.schema import SchemaEventTarget -from sqlalchemy.util import OrderedDict -from sqlalchemy.util import topological - -from ..util import exc -from ..util.sqla_compat import _columns_for_constraint -from ..util.sqla_compat import _copy -from ..util.sqla_compat import _copy_expression -from ..util.sqla_compat import _ensure_scope_for_ddl -from ..util.sqla_compat import _fk_is_self_referential -from ..util.sqla_compat import _idx_table_bound_expressions -from ..util.sqla_compat import _insert_inline -from ..util.sqla_compat import _is_type_bound -from ..util.sqla_compat import _remove_column_from_collection -from ..util.sqla_compat import _resolve_for_variant -from ..util.sqla_compat import _select -from ..util.sqla_compat import constraint_name_defined -from ..util.sqla_compat import constraint_name_string - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy.engine import Dialect - from sqlalchemy.sql.elements import ColumnClause - from sqlalchemy.sql.elements import quoted_name - from sqlalchemy.sql.functions import Function - from sqlalchemy.sql.schema import Constraint - from sqlalchemy.sql.type_api import TypeEngine - - from ..ddl.impl import DefaultImpl - - -class BatchOperationsImpl: - def __init__( - self, - operations, - table_name, - schema, - recreate, - copy_from, - table_args, - table_kwargs, - reflect_args, - reflect_kwargs, - naming_convention, - partial_reordering, - ): - self.operations = operations - self.table_name = table_name - self.schema = schema - if recreate not in ("auto", "always", "never"): - raise ValueError( - "recreate may be one of 'auto', 'always', or 'never'." 
- ) - self.recreate = recreate - self.copy_from = copy_from - self.table_args = table_args - self.table_kwargs = dict(table_kwargs) - self.reflect_args = reflect_args - self.reflect_kwargs = dict(reflect_kwargs) - self.reflect_kwargs.setdefault( - "listeners", list(self.reflect_kwargs.get("listeners", ())) - ) - self.reflect_kwargs["listeners"].append( - ("column_reflect", operations.impl.autogen_column_reflect) - ) - self.naming_convention = naming_convention - self.partial_reordering = partial_reordering - self.batch = [] - - @property - def dialect(self) -> Dialect: - return self.operations.impl.dialect - - @property - def impl(self) -> DefaultImpl: - return self.operations.impl - - def _should_recreate(self) -> bool: - if self.recreate == "auto": - return self.operations.impl.requires_recreate_in_batch(self) - elif self.recreate == "always": - return True - else: - return False - - def flush(self) -> None: - should_recreate = self._should_recreate() - - with _ensure_scope_for_ddl(self.impl.connection): - if not should_recreate: - for opname, arg, kw in self.batch: - fn = getattr(self.operations.impl, opname) - fn(*arg, **kw) - else: - if self.naming_convention: - m1 = MetaData(naming_convention=self.naming_convention) - else: - m1 = MetaData() - - if self.copy_from is not None: - existing_table = self.copy_from - reflected = False - else: - if self.operations.migration_context.as_sql: - raise exc.CommandError( - f"This operation cannot proceed in --sql mode; " - f"batch mode with dialect " - f"{self.operations.migration_context.dialect.name} " # noqa: E501 - f"requires a live database connection with which " - f'to reflect the table "{self.table_name}". ' - f"To generate a batch SQL migration script using " - "table " - '"move and copy", a complete Table object ' - f'should be passed to the "copy_from" argument ' - "of the batch_alter_table() method so that table " - "reflection can be skipped." 
- ) - - existing_table = Table( - self.table_name, - m1, - schema=self.schema, - autoload_with=self.operations.get_bind(), - *self.reflect_args, - **self.reflect_kwargs, - ) - reflected = True - - batch_impl = ApplyBatchImpl( - self.impl, - existing_table, - self.table_args, - self.table_kwargs, - reflected, - partial_reordering=self.partial_reordering, - ) - for opname, arg, kw in self.batch: - fn = getattr(batch_impl, opname) - fn(*arg, **kw) - - batch_impl._create(self.impl) - - def alter_column(self, *arg, **kw) -> None: - self.batch.append(("alter_column", arg, kw)) - - def add_column(self, *arg, **kw) -> None: - if ( - "insert_before" in kw or "insert_after" in kw - ) and not self._should_recreate(): - raise exc.CommandError( - "Can't specify insert_before or insert_after when using " - "ALTER; please specify recreate='always'" - ) - self.batch.append(("add_column", arg, kw)) - - def drop_column(self, *arg, **kw) -> None: - self.batch.append(("drop_column", arg, kw)) - - def add_constraint(self, const: Constraint) -> None: - self.batch.append(("add_constraint", (const,), {})) - - def drop_constraint(self, const: Constraint) -> None: - self.batch.append(("drop_constraint", (const,), {})) - - def rename_table(self, *arg, **kw): - self.batch.append(("rename_table", arg, kw)) - - def create_index(self, idx: Index, **kw: Any) -> None: - self.batch.append(("create_index", (idx,), kw)) - - def drop_index(self, idx: Index, **kw: Any) -> None: - self.batch.append(("drop_index", (idx,), kw)) - - def create_table_comment(self, table): - self.batch.append(("create_table_comment", (table,), {})) - - def drop_table_comment(self, table): - self.batch.append(("drop_table_comment", (table,), {})) - - def create_table(self, table): - raise NotImplementedError("Can't create table in batch mode") - - def drop_table(self, table): - raise NotImplementedError("Can't drop table in batch mode") - - def create_column_comment(self, column): - self.batch.append(("create_column_comment", (column,), {})) - - -class ApplyBatchImpl: - def __init__( - self, - impl: DefaultImpl, - table: Table, - table_args: tuple, - table_kwargs: Dict[str, Any], - reflected: bool, - partial_reordering: tuple = (), - ) -> None: - self.impl = impl - self.table = table # this is a Table object - self.table_args = table_args - self.table_kwargs = table_kwargs - self.temp_table_name = self._calc_temp_name(table.name) - self.new_table: Optional[Table] = None - - self.partial_reordering = partial_reordering # tuple of tuples - self.add_col_ordering: Tuple[ - Tuple[str, str], ... 
- ] = () # tuple of tuples - - self.column_transfers = OrderedDict( - (c.name, {"expr": c}) for c in self.table.c - ) - self.existing_ordering = list(self.column_transfers) - - self.reflected = reflected - self._grab_table_elements() - - @classmethod - def _calc_temp_name(cls, tablename: Union[quoted_name, str]) -> str: - return ("_alembic_tmp_%s" % tablename)[0:50] - - def _grab_table_elements(self) -> None: - schema = self.table.schema - self.columns: Dict[str, Column[Any]] = OrderedDict() - for c in self.table.c: - c_copy = _copy(c, schema=schema) - c_copy.unique = c_copy.index = False - # ensure that the type object was copied, - # as we may need to modify it in-place - if isinstance(c.type, SchemaEventTarget): - assert c_copy.type is not c.type - self.columns[c.name] = c_copy - self.named_constraints: Dict[str, Constraint] = {} - self.unnamed_constraints = [] - self.col_named_constraints = {} - self.indexes: Dict[str, Index] = {} - self.new_indexes: Dict[str, Index] = {} - - for const in self.table.constraints: - if _is_type_bound(const): - continue - elif ( - self.reflected - and isinstance(const, CheckConstraint) - and not const.name - ): - # TODO: we are skipping unnamed reflected CheckConstraint - # because - # we have no way to determine _is_type_bound() for these. - pass - elif constraint_name_string(const.name): - self.named_constraints[const.name] = const - else: - self.unnamed_constraints.append(const) - - if not self.reflected: - for col in self.table.c: - for const in col.constraints: - if const.name: - self.col_named_constraints[const.name] = (col, const) - - for idx in self.table.indexes: - self.indexes[idx.name] = idx # type: ignore[index] - - for k in self.table.kwargs: - self.table_kwargs.setdefault(k, self.table.kwargs[k]) - - def _adjust_self_columns_for_partial_reordering(self) -> None: - pairs = set() - - col_by_idx = list(self.columns) - - if self.partial_reordering: - for tuple_ in self.partial_reordering: - for index, elem in enumerate(tuple_): - if index > 0: - pairs.add((tuple_[index - 1], elem)) - else: - for index, elem in enumerate(self.existing_ordering): - if index > 0: - pairs.add((col_by_idx[index - 1], elem)) - - pairs.update(self.add_col_ordering) - - # this can happen if some columns were dropped and not removed - # from existing_ordering. 
this should be prevented already, but - # conservatively making sure this didn't happen - pairs_list = [p for p in pairs if p[0] != p[1]] - - sorted_ = list( - topological.sort(pairs_list, col_by_idx, deterministic_order=True) - ) - self.columns = OrderedDict((k, self.columns[k]) for k in sorted_) - self.column_transfers = OrderedDict( - (k, self.column_transfers[k]) for k in sorted_ - ) - - def _transfer_elements_to_new_table(self) -> None: - assert self.new_table is None, "Can only create new table once" - - m = MetaData() - schema = self.table.schema - - if self.partial_reordering or self.add_col_ordering: - self._adjust_self_columns_for_partial_reordering() - - self.new_table = new_table = Table( - self.temp_table_name, - m, - *(list(self.columns.values()) + list(self.table_args)), - schema=schema, - **self.table_kwargs, - ) - - for const in ( - list(self.named_constraints.values()) + self.unnamed_constraints - ): - const_columns = {c.key for c in _columns_for_constraint(const)} - - if not const_columns.issubset(self.column_transfers): - continue - - const_copy: Constraint - if isinstance(const, ForeignKeyConstraint): - if _fk_is_self_referential(const): - # for self-referential constraint, refer to the - # *original* table name, and not _alembic_batch_temp. - # This is consistent with how we're handling - # FK constraints from other tables; we assume SQLite - # no foreign keys just keeps the names unchanged, so - # when we rename back, they match again. - const_copy = _copy( - const, schema=schema, target_table=self.table - ) - else: - # "target_table" for ForeignKeyConstraint.copy() is - # only used if the FK is detected as being - # self-referential, which we are handling above. - const_copy = _copy(const, schema=schema) - else: - const_copy = _copy( - const, schema=schema, target_table=new_table - ) - if isinstance(const, ForeignKeyConstraint): - self._setup_referent(m, const) - new_table.append_constraint(const_copy) - - def _gather_indexes_from_both_tables(self) -> List[Index]: - assert self.new_table is not None - idx: List[Index] = [] - - for idx_existing in self.indexes.values(): - # this is a lift-and-move from Table.to_metadata - - if idx_existing._column_flag: - continue - - idx_copy = Index( - idx_existing.name, - unique=idx_existing.unique, - *[ - _copy_expression(expr, self.new_table) - for expr in _idx_table_bound_expressions(idx_existing) - ], - _table=self.new_table, - **idx_existing.kwargs, - ) - idx.append(idx_copy) - - for index in self.new_indexes.values(): - idx.append( - Index( - index.name, - unique=index.unique, - *[self.new_table.c[col] for col in index.columns.keys()], - **index.kwargs, - ) - ) - return idx - - def _setup_referent( - self, metadata: MetaData, constraint: ForeignKeyConstraint - ) -> None: - spec = constraint.elements[0]._get_colspec() - parts = spec.split(".") - tname = parts[-2] - if len(parts) == 3: - referent_schema = parts[0] - else: - referent_schema = None - - if tname != self.temp_table_name: - key = sql_schema._get_table_key(tname, referent_schema) - - def colspec(elem: Any): - return elem._get_colspec() - - if key in metadata.tables: - t = metadata.tables[key] - for elem in constraint.elements: - colname = colspec(elem).split(".")[-1] - if colname not in t.c: - t.append_column(Column(colname, sqltypes.NULLTYPE)) - else: - Table( - tname, - metadata, - *[ - Column(n, sqltypes.NULLTYPE) - for n in [ - colspec(elem).split(".")[-1] - for elem in constraint.elements - ] - ], - schema=referent_schema, - ) - - def _create(self, op_impl: 
DefaultImpl) -> None: - self._transfer_elements_to_new_table() - - op_impl.prep_table_for_batch(self, self.table) - assert self.new_table is not None - op_impl.create_table(self.new_table) - - try: - op_impl._exec( - _insert_inline(self.new_table).from_select( - list( - k - for k, transfer in self.column_transfers.items() - if "expr" in transfer - ), - _select( - *[ - transfer["expr"] - for transfer in self.column_transfers.values() - if "expr" in transfer - ] - ), - ) - ) - op_impl.drop_table(self.table) - except: - op_impl.drop_table(self.new_table) - raise - else: - op_impl.rename_table( - self.temp_table_name, self.table.name, schema=self.table.schema - ) - self.new_table.name = self.table.name - try: - for idx in self._gather_indexes_from_both_tables(): - op_impl.create_index(idx) - finally: - self.new_table.name = self.temp_table_name - - def alter_column( - self, - table_name: str, - column_name: str, - nullable: Optional[bool] = None, - server_default: Optional[Union[Function[Any], str, bool]] = False, - name: Optional[str] = None, - type_: Optional[TypeEngine] = None, - autoincrement: Optional[Union[bool, Literal["auto"]]] = None, - comment: Union[str, Literal[False]] = False, - **kw, - ) -> None: - existing = self.columns[column_name] - existing_transfer: Dict[str, Any] = self.column_transfers[column_name] - if name is not None and name != column_name: - # note that we don't change '.key' - we keep referring - # to the renamed column by its old key in _create(). neat! - existing.name = name - existing_transfer["name"] = name - - existing_type = kw.get("existing_type", None) - if existing_type: - resolved_existing_type = _resolve_for_variant( - kw["existing_type"], self.impl.dialect - ) - - # pop named constraints for Boolean/Enum for rename - if ( - isinstance(resolved_existing_type, SchemaEventTarget) - and resolved_existing_type.name # type:ignore[attr-defined] # noqa E501 - ): - self.named_constraints.pop( - resolved_existing_type.name, # type:ignore[attr-defined] # noqa E501 - None, - ) - - if type_ is not None: - type_ = sqltypes.to_instance(type_) - # old type is being discarded so turn off eventing - # rules. Alternatively we can - # erase the events set up by this type, but this is simpler. 
- # we also ignore the drop_constraint that will come here from - # Operations.implementation_for(alter_column) - - if isinstance(existing.type, SchemaEventTarget): - existing.type._create_events = ( # type:ignore[attr-defined] - existing.type.create_constraint # type:ignore[attr-defined] # noqa - ) = False - - self.impl.cast_for_batch_migrate( - existing, existing_transfer, type_ - ) - - existing.type = type_ - - # we *dont* however set events for the new type, because - # alter_column is invoked from - # Operations.implementation_for(alter_column) which already - # will emit an add_constraint() - - if nullable is not None: - existing.nullable = nullable - if server_default is not False: - if server_default is None: - existing.server_default = None - else: - sql_schema.DefaultClause( - server_default # type: ignore[arg-type] - )._set_parent(existing) - if autoincrement is not None: - existing.autoincrement = bool(autoincrement) - - if comment is not False: - existing.comment = comment - - def _setup_dependencies_for_add_column( - self, - colname: str, - insert_before: Optional[str], - insert_after: Optional[str], - ) -> None: - index_cols = self.existing_ordering - col_indexes = {name: i for i, name in enumerate(index_cols)} - - if not self.partial_reordering: - if insert_after: - if not insert_before: - if insert_after in col_indexes: - # insert after an existing column - idx = col_indexes[insert_after] + 1 - if idx < len(index_cols): - insert_before = index_cols[idx] - else: - # insert after a column that is also new - insert_before = dict(self.add_col_ordering)[ - insert_after - ] - if insert_before: - if not insert_after: - if insert_before in col_indexes: - # insert before an existing column - idx = col_indexes[insert_before] - 1 - if idx >= 0: - insert_after = index_cols[idx] - else: - # insert before a column that is also new - insert_after = { - b: a for a, b in self.add_col_ordering - }[insert_before] - - if insert_before: - self.add_col_ordering += ((colname, insert_before),) - if insert_after: - self.add_col_ordering += ((insert_after, colname),) - - if ( - not self.partial_reordering - and not insert_before - and not insert_after - and col_indexes - ): - self.add_col_ordering += ((index_cols[-1], colname),) - - def add_column( - self, - table_name: str, - column: Column[Any], - insert_before: Optional[str] = None, - insert_after: Optional[str] = None, - **kw, - ) -> None: - self._setup_dependencies_for_add_column( - column.name, insert_before, insert_after - ) - # we copy the column because operations.add_column() - # gives us a Column that is part of a Table already. 
- self.columns[column.name] = _copy(column, schema=self.table.schema) - self.column_transfers[column.name] = {} - - def drop_column( - self, - table_name: str, - column: Union[ColumnClause[Any], Column[Any]], - **kw, - ) -> None: - if column.name in self.table.primary_key.columns: - _remove_column_from_collection( - self.table.primary_key.columns, column - ) - del self.columns[column.name] - del self.column_transfers[column.name] - self.existing_ordering.remove(column.name) - - # pop named constraints for Boolean/Enum for rename - if ( - "existing_type" in kw - and isinstance(kw["existing_type"], SchemaEventTarget) - and kw["existing_type"].name # type:ignore[attr-defined] - ): - self.named_constraints.pop( - kw["existing_type"].name, None # type:ignore[attr-defined] - ) - - def create_column_comment(self, column): - """the batch table creation function will issue create_column_comment - on the real "impl" as part of the create table process. - - That is, the Column object will have the comment on it already, - so when it is received by add_column() it will be a normal part of - the CREATE TABLE and doesn't need an extra step here. - - """ - - def create_table_comment(self, table): - """the batch table creation function will issue create_table_comment - on the real "impl" as part of the create table process. - - """ - - def drop_table_comment(self, table): - """the batch table creation function will issue drop_table_comment - on the real "impl" as part of the create table process. - - """ - - def add_constraint(self, const: Constraint) -> None: - if not constraint_name_defined(const.name): - raise ValueError("Constraint must have a name") - if isinstance(const, sql_schema.PrimaryKeyConstraint): - if self.table.primary_key in self.unnamed_constraints: - self.unnamed_constraints.remove(self.table.primary_key) - - if constraint_name_string(const.name): - self.named_constraints[const.name] = const - else: - self.unnamed_constraints.append(const) - - def drop_constraint(self, const: Constraint) -> None: - if not const.name: - raise ValueError("Constraint must have a name") - try: - if const.name in self.col_named_constraints: - col, const = self.col_named_constraints.pop(const.name) - - for col_const in list(self.columns[col.name].constraints): - if col_const.name == const.name: - self.columns[col.name].constraints.remove(col_const) - elif constraint_name_string(const.name): - const = self.named_constraints.pop(const.name) - elif const in self.unnamed_constraints: - self.unnamed_constraints.remove(const) - - except KeyError: - if _is_type_bound(const): - # type-bound constraints are only included in the new - # table via their type object in any case, so ignore the - # drop_constraint() that comes here via the - # Operations.implementation_for(alter_column) - return - raise ValueError("No such constraint: '%s'" % const.name) - else: - if isinstance(const, PrimaryKeyConstraint): - for col in const.columns: - self.columns[col.name].primary_key = False - - def create_index(self, idx: Index) -> None: - self.new_indexes[idx.name] = idx # type: ignore[index] - - def drop_index(self, idx: Index) -> None: - try: - del self.indexes[idx.name] # type: ignore[arg-type] - except KeyError: - raise ValueError("No such index: '%s'" % idx.name) - - def rename_table(self, *arg, **kw): - raise NotImplementedError("TODO") diff --git a/myenv/Lib/site-packages/alembic/operations/ops.py b/myenv/Lib/site-packages/alembic/operations/ops.py deleted file mode 100644 index 60b856a..0000000 --- 
a/myenv/Lib/site-packages/alembic/operations/ops.py +++ /dev/null @@ -1,2800 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -import re -from typing import Any -from typing import Callable -from typing import cast -from typing import Dict -from typing import FrozenSet -from typing import Iterator -from typing import List -from typing import MutableMapping -from typing import Optional -from typing import Sequence -from typing import Set -from typing import Tuple -from typing import Type -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -from sqlalchemy.types import NULLTYPE - -from . import schemaobj -from .base import BatchOperations -from .base import Operations -from .. import util -from ..util import sqla_compat - -if TYPE_CHECKING: - from typing import Literal - - from sqlalchemy.sql import Executable - from sqlalchemy.sql.elements import ColumnElement - from sqlalchemy.sql.elements import conv - from sqlalchemy.sql.elements import quoted_name - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.functions import Function - from sqlalchemy.sql.schema import CheckConstraint - from sqlalchemy.sql.schema import Column - from sqlalchemy.sql.schema import Computed - from sqlalchemy.sql.schema import Constraint - from sqlalchemy.sql.schema import ForeignKeyConstraint - from sqlalchemy.sql.schema import Identity - from sqlalchemy.sql.schema import Index - from sqlalchemy.sql.schema import MetaData - from sqlalchemy.sql.schema import PrimaryKeyConstraint - from sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.sql.schema import Table - from sqlalchemy.sql.schema import UniqueConstraint - from sqlalchemy.sql.selectable import TableClause - from sqlalchemy.sql.type_api import TypeEngine - - from ..autogenerate.rewriter import Rewriter - from ..runtime.migration import MigrationContext - from ..script.revision import _RevIdType - -_T = TypeVar("_T", bound=Any) -_AC = TypeVar("_AC", bound="AddConstraintOp") - - -class MigrateOperation: - """base class for migration command and organization objects. - - This system is part of the operation extensibility API. - - .. seealso:: - - :ref:`operation_objects` - - :ref:`operation_plugins` - - :ref:`customizing_revision` - - """ - - @util.memoized_property - def info(self) -> Dict[Any, Any]: - """A dictionary that may be used to store arbitrary information - along with this :class:`.MigrateOperation` object. 
- - """ - return {} - - _mutations: FrozenSet[Rewriter] = frozenset() - - def reverse(self) -> MigrateOperation: - raise NotImplementedError - - def to_diff_tuple(self) -> Tuple[Any, ...]: - raise NotImplementedError - - -class AddConstraintOp(MigrateOperation): - """Represent an add constraint operation.""" - - add_constraint_ops = util.Dispatcher() - - @property - def constraint_type(self) -> str: - raise NotImplementedError() - - @classmethod - def register_add_constraint( - cls, type_: str - ) -> Callable[[Type[_AC]], Type[_AC]]: - def go(klass: Type[_AC]) -> Type[_AC]: - cls.add_constraint_ops.dispatch_for(type_)(klass.from_constraint) - return klass - - return go - - @classmethod - def from_constraint(cls, constraint: Constraint) -> AddConstraintOp: - return cls.add_constraint_ops.dispatch(constraint.__visit_name__)( # type: ignore[no-any-return] # noqa: E501 - constraint - ) - - @abstractmethod - def to_constraint( - self, migration_context: Optional[MigrationContext] = None - ) -> Constraint: - pass - - def reverse(self) -> DropConstraintOp: - return DropConstraintOp.from_constraint(self.to_constraint()) - - def to_diff_tuple(self) -> Tuple[str, Constraint]: - return ("add_constraint", self.to_constraint()) - - -@Operations.register_operation("drop_constraint") -@BatchOperations.register_operation("drop_constraint", "batch_drop_constraint") -class DropConstraintOp(MigrateOperation): - """Represent a drop constraint operation.""" - - def __init__( - self, - constraint_name: Optional[sqla_compat._ConstraintNameDefined], - table_name: str, - type_: Optional[str] = None, - *, - schema: Optional[str] = None, - _reverse: Optional[AddConstraintOp] = None, - ) -> None: - self.constraint_name = constraint_name - self.table_name = table_name - self.constraint_type = type_ - self.schema = schema - self._reverse = _reverse - - def reverse(self) -> AddConstraintOp: - return AddConstraintOp.from_constraint(self.to_constraint()) - - def to_diff_tuple( - self, - ) -> Tuple[str, SchemaItem]: - if self.constraint_type == "foreignkey": - return ("remove_fk", self.to_constraint()) - else: - return ("remove_constraint", self.to_constraint()) - - @classmethod - def from_constraint(cls, constraint: Constraint) -> DropConstraintOp: - types = { - "unique_constraint": "unique", - "foreign_key_constraint": "foreignkey", - "primary_key_constraint": "primary", - "check_constraint": "check", - "column_check_constraint": "check", - "table_or_column_check_constraint": "check", - } - - constraint_table = sqla_compat._table_for_constraint(constraint) - return cls( - sqla_compat.constraint_name_or_none(constraint.name), - constraint_table.name, - schema=constraint_table.schema, - type_=types.get(constraint.__visit_name__), - _reverse=AddConstraintOp.from_constraint(constraint), - ) - - def to_constraint(self) -> Constraint: - if self._reverse is not None: - constraint = self._reverse.to_constraint() - constraint.name = self.constraint_name - constraint_table = sqla_compat._table_for_constraint(constraint) - constraint_table.name = self.table_name - constraint_table.schema = self.schema - - return constraint - else: - raise ValueError( - "constraint cannot be produced; " - "original constraint is not present" - ) - - @classmethod - def drop_constraint( - cls, - operations: Operations, - constraint_name: str, - table_name: str, - type_: Optional[str] = None, - *, - schema: Optional[str] = None, - ) -> None: - r"""Drop a constraint of the given name, typically via DROP CONSTRAINT. 
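
A minimal sketch of this directive as typically invoked from a migration script; the constraint and table names are hypothetical:

    from alembic import op

    def upgrade():
        # type_ is required on MySQL, which uses a distinct DROP syntax
        # per constraint type
        op.drop_constraint("fk_address_user", "address", type_="foreignkey")
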
- - :param constraint_name: name of the constraint. - :param table_name: table name. - :param type\_: optional, required on MySQL. can be - 'foreignkey', 'primary', 'unique', or 'check'. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - - op = cls(constraint_name, table_name, type_=type_, schema=schema) - return operations.invoke(op) - - @classmethod - def batch_drop_constraint( - cls, - operations: BatchOperations, - constraint_name: str, - type_: Optional[str] = None, - ) -> None: - """Issue a "drop constraint" instruction using the - current batch migration context. - - The batch form of this call omits the ``table_name`` and ``schema`` - arguments from the call. - - .. seealso:: - - :meth:`.Operations.drop_constraint` - - """ - op = cls( - constraint_name, - operations.impl.table_name, - type_=type_, - schema=operations.impl.schema, - ) - return operations.invoke(op) - - -@Operations.register_operation("create_primary_key") -@BatchOperations.register_operation( - "create_primary_key", "batch_create_primary_key" -) -@AddConstraintOp.register_add_constraint("primary_key_constraint") -class CreatePrimaryKeyOp(AddConstraintOp): - """Represent a create primary key operation.""" - - constraint_type = "primarykey" - - def __init__( - self, - constraint_name: Optional[sqla_compat._ConstraintNameDefined], - table_name: str, - columns: Sequence[str], - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - self.constraint_name = constraint_name - self.table_name = table_name - self.columns = columns - self.schema = schema - self.kw = kw - - @classmethod - def from_constraint(cls, constraint: Constraint) -> CreatePrimaryKeyOp: - constraint_table = sqla_compat._table_for_constraint(constraint) - pk_constraint = cast("PrimaryKeyConstraint", constraint) - return cls( - sqla_compat.constraint_name_or_none(pk_constraint.name), - constraint_table.name, - pk_constraint.columns.keys(), - schema=constraint_table.schema, - **pk_constraint.dialect_kwargs, - ) - - def to_constraint( - self, migration_context: Optional[MigrationContext] = None - ) -> PrimaryKeyConstraint: - schema_obj = schemaobj.SchemaObjects(migration_context) - - return schema_obj.primary_key_constraint( - self.constraint_name, - self.table_name, - self.columns, - schema=self.schema, - **self.kw, - ) - - @classmethod - def create_primary_key( - cls, - operations: Operations, - constraint_name: Optional[str], - table_name: str, - columns: List[str], - *, - schema: Optional[str] = None, - ) -> None: - """Issue a "create primary key" instruction using the current - migration context. - - e.g.:: - - from alembic import op - - op.create_primary_key("pk_my_table", "my_table", ["id", "version"]) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.PrimaryKeyConstraint` - object which it then associates with the - :class:`~sqlalchemy.schema.Table`. - Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param constraint_name: Name of the primary key constraint. The name - is necessary so that an ALTER statement can be emitted. 
For setups - that use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions` - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the target table. - :param columns: a list of string column names to be applied to the - primary key constraint. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - op = cls(constraint_name, table_name, columns, schema=schema) - return operations.invoke(op) - - @classmethod - def batch_create_primary_key( - cls, - operations: BatchOperations, - constraint_name: Optional[str], - columns: List[str], - ) -> None: - """Issue a "create primary key" instruction using the - current batch migration context. - - The batch form of this call omits the ``table_name`` and ``schema`` - arguments from the call. - - .. seealso:: - - :meth:`.Operations.create_primary_key` - - """ - op = cls( - constraint_name, - operations.impl.table_name, - columns, - schema=operations.impl.schema, - ) - return operations.invoke(op) - - -@Operations.register_operation("create_unique_constraint") -@BatchOperations.register_operation( - "create_unique_constraint", "batch_create_unique_constraint" -) -@AddConstraintOp.register_add_constraint("unique_constraint") -class CreateUniqueConstraintOp(AddConstraintOp): - """Represent a create unique constraint operation.""" - - constraint_type = "unique" - - def __init__( - self, - constraint_name: Optional[sqla_compat._ConstraintNameDefined], - table_name: str, - columns: Sequence[str], - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - self.constraint_name = constraint_name - self.table_name = table_name - self.columns = columns - self.schema = schema - self.kw = kw - - @classmethod - def from_constraint( - cls, constraint: Constraint - ) -> CreateUniqueConstraintOp: - constraint_table = sqla_compat._table_for_constraint(constraint) - - uq_constraint = cast("UniqueConstraint", constraint) - - kw: Dict[str, Any] = {} - if uq_constraint.deferrable: - kw["deferrable"] = uq_constraint.deferrable - if uq_constraint.initially: - kw["initially"] = uq_constraint.initially - kw.update(uq_constraint.dialect_kwargs) - return cls( - sqla_compat.constraint_name_or_none(uq_constraint.name), - constraint_table.name, - [c.name for c in uq_constraint.columns], - schema=constraint_table.schema, - **kw, - ) - - def to_constraint( - self, migration_context: Optional[MigrationContext] = None - ) -> UniqueConstraint: - schema_obj = schemaobj.SchemaObjects(migration_context) - return schema_obj.unique_constraint( - self.constraint_name, - self.table_name, - self.columns, - schema=self.schema, - **self.kw, - ) - - @classmethod - def create_unique_constraint( - cls, - operations: Operations, - constraint_name: Optional[str], - table_name: str, - columns: Sequence[str], - *, - schema: Optional[str] = None, - **kw: Any, - ) -> Any: - """Issue a "create unique constraint" instruction using the - current migration context. 
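
A minimal sketch of the batch form described above; the table, constraint, and column names are hypothetical:

    from alembic import op

    def upgrade():
        # table_name and schema are supplied by the batch context itself
        with op.batch_alter_table("my_table") as batch_op:
            batch_op.create_primary_key("pk_my_table", ["id", "version"])
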
- - e.g.:: - - from alembic import op - op.create_unique_constraint("uq_user_name", "user", ["name"]) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.UniqueConstraint` - object which it then associates with the - :class:`~sqlalchemy.schema.Table`. - Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param name: Name of the unique constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the source table. - :param columns: a list of string column names in the - source table. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - - op = cls(constraint_name, table_name, columns, schema=schema, **kw) - return operations.invoke(op) - - @classmethod - def batch_create_unique_constraint( - cls, - operations: BatchOperations, - constraint_name: str, - columns: Sequence[str], - **kw: Any, - ) -> Any: - """Issue a "create unique constraint" instruction using the - current batch migration context. - - The batch form of this call omits the ``source`` and ``schema`` - arguments from the call. - - .. 
seealso:: - - :meth:`.Operations.create_unique_constraint` - - """ - kw["schema"] = operations.impl.schema - op = cls(constraint_name, operations.impl.table_name, columns, **kw) - return operations.invoke(op) - - -@Operations.register_operation("create_foreign_key") -@BatchOperations.register_operation( - "create_foreign_key", "batch_create_foreign_key" -) -@AddConstraintOp.register_add_constraint("foreign_key_constraint") -class CreateForeignKeyOp(AddConstraintOp): - """Represent a create foreign key constraint operation.""" - - constraint_type = "foreignkey" - - def __init__( - self, - constraint_name: Optional[sqla_compat._ConstraintNameDefined], - source_table: str, - referent_table: str, - local_cols: List[str], - remote_cols: List[str], - **kw: Any, - ) -> None: - self.constraint_name = constraint_name - self.source_table = source_table - self.referent_table = referent_table - self.local_cols = local_cols - self.remote_cols = remote_cols - self.kw = kw - - def to_diff_tuple(self) -> Tuple[str, ForeignKeyConstraint]: - return ("add_fk", self.to_constraint()) - - @classmethod - def from_constraint(cls, constraint: Constraint) -> CreateForeignKeyOp: - fk_constraint = cast("ForeignKeyConstraint", constraint) - kw: Dict[str, Any] = {} - if fk_constraint.onupdate: - kw["onupdate"] = fk_constraint.onupdate - if fk_constraint.ondelete: - kw["ondelete"] = fk_constraint.ondelete - if fk_constraint.initially: - kw["initially"] = fk_constraint.initially - if fk_constraint.deferrable: - kw["deferrable"] = fk_constraint.deferrable - if fk_constraint.use_alter: - kw["use_alter"] = fk_constraint.use_alter - if fk_constraint.match: - kw["match"] = fk_constraint.match - - ( - source_schema, - source_table, - source_columns, - target_schema, - target_table, - target_columns, - onupdate, - ondelete, - deferrable, - initially, - ) = sqla_compat._fk_spec(fk_constraint) - - kw["source_schema"] = source_schema - kw["referent_schema"] = target_schema - kw.update(fk_constraint.dialect_kwargs) - return cls( - sqla_compat.constraint_name_or_none(fk_constraint.name), - source_table, - target_table, - source_columns, - target_columns, - **kw, - ) - - def to_constraint( - self, migration_context: Optional[MigrationContext] = None - ) -> ForeignKeyConstraint: - schema_obj = schemaobj.SchemaObjects(migration_context) - return schema_obj.foreign_key_constraint( - self.constraint_name, - self.source_table, - self.referent_table, - self.local_cols, - self.remote_cols, - **self.kw, - ) - - @classmethod - def create_foreign_key( - cls, - operations: Operations, - constraint_name: Optional[str], - source_table: str, - referent_table: str, - local_cols: List[str], - remote_cols: List[str], - *, - onupdate: Optional[str] = None, - ondelete: Optional[str] = None, - deferrable: Optional[bool] = None, - initially: Optional[str] = None, - match: Optional[str] = None, - source_schema: Optional[str] = None, - referent_schema: Optional[str] = None, - **dialect_kw: Any, - ) -> None: - """Issue a "create foreign key" instruction using the - current migration context. - - e.g.:: - - from alembic import op - - op.create_foreign_key( - "fk_user_address", - "address", - "user", - ["user_id"], - ["id"], - ) - - This internally generates a :class:`~sqlalchemy.schema.Table` object - containing the necessary columns, then generates a new - :class:`~sqlalchemy.schema.ForeignKeyConstraint` - object which it then associates with the - :class:`~sqlalchemy.schema.Table`. 
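
A sketch extending the docstring example with the optional referential-action parameters accepted here; all names are hypothetical:

    from alembic import op

    def upgrade():
        op.create_foreign_key(
            "fk_order_customer",
            "order",          # source table
            "customer",       # referent table
            ["customer_id"],  # local columns
            ["id"],           # remote columns
            ondelete="CASCADE",
            deferrable=True,
        )
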
- Any event listeners associated with this action will be fired - off normally. The :class:`~sqlalchemy.schema.AddConstraint` - construct is ultimately used to generate the ALTER statement. - - :param constraint_name: Name of the foreign key constraint. The name - is necessary so that an ALTER statement can be emitted. For setups - that use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param source_table: String name of the source table. - :param referent_table: String name of the destination table. - :param local_cols: a list of string column names in the - source table. - :param remote_cols: a list of string column names in the - remote table. - :param onupdate: Optional string. If set, emit ON UPDATE when - issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. - :param ondelete: Optional string. If set, emit ON DELETE when - issuing DDL for this constraint. Typical values include CASCADE, - DELETE and RESTRICT. - :param deferrable: optional bool. If set, emit DEFERRABLE or NOT - DEFERRABLE when issuing DDL for this constraint. - :param source_schema: Optional schema name of the source table. - :param referent_schema: Optional schema name of the destination table. - - """ - - op = cls( - constraint_name, - source_table, - referent_table, - local_cols, - remote_cols, - onupdate=onupdate, - ondelete=ondelete, - deferrable=deferrable, - source_schema=source_schema, - referent_schema=referent_schema, - initially=initially, - match=match, - **dialect_kw, - ) - return operations.invoke(op) - - @classmethod - def batch_create_foreign_key( - cls, - operations: BatchOperations, - constraint_name: Optional[str], - referent_table: str, - local_cols: List[str], - remote_cols: List[str], - *, - referent_schema: Optional[str] = None, - onupdate: Optional[str] = None, - ondelete: Optional[str] = None, - deferrable: Optional[bool] = None, - initially: Optional[str] = None, - match: Optional[str] = None, - **dialect_kw: Any, - ) -> None: - """Issue a "create foreign key" instruction using the - current batch migration context. - - The batch form of this call omits the ``source`` and ``source_schema`` - arguments from the call. - - e.g.:: - - with batch_alter_table("address") as batch_op: - batch_op.create_foreign_key( - "fk_user_address", - "user", - ["user_id"], - ["id"], - ) - - .. 
seealso:: - - :meth:`.Operations.create_foreign_key` - - """ - op = cls( - constraint_name, - operations.impl.table_name, - referent_table, - local_cols, - remote_cols, - onupdate=onupdate, - ondelete=ondelete, - deferrable=deferrable, - source_schema=operations.impl.schema, - referent_schema=referent_schema, - initially=initially, - match=match, - **dialect_kw, - ) - return operations.invoke(op) - - -@Operations.register_operation("create_check_constraint") -@BatchOperations.register_operation( - "create_check_constraint", "batch_create_check_constraint" -) -@AddConstraintOp.register_add_constraint("check_constraint") -@AddConstraintOp.register_add_constraint("table_or_column_check_constraint") -@AddConstraintOp.register_add_constraint("column_check_constraint") -class CreateCheckConstraintOp(AddConstraintOp): - """Represent a create check constraint operation.""" - - constraint_type = "check" - - def __init__( - self, - constraint_name: Optional[sqla_compat._ConstraintNameDefined], - table_name: str, - condition: Union[str, TextClause, ColumnElement[Any]], - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - self.constraint_name = constraint_name - self.table_name = table_name - self.condition = condition - self.schema = schema - self.kw = kw - - @classmethod - def from_constraint( - cls, constraint: Constraint - ) -> CreateCheckConstraintOp: - constraint_table = sqla_compat._table_for_constraint(constraint) - - ck_constraint = cast("CheckConstraint", constraint) - return cls( - sqla_compat.constraint_name_or_none(ck_constraint.name), - constraint_table.name, - cast("ColumnElement[Any]", ck_constraint.sqltext), - schema=constraint_table.schema, - **ck_constraint.dialect_kwargs, - ) - - def to_constraint( - self, migration_context: Optional[MigrationContext] = None - ) -> CheckConstraint: - schema_obj = schemaobj.SchemaObjects(migration_context) - return schema_obj.check_constraint( - self.constraint_name, - self.table_name, - self.condition, - schema=self.schema, - **self.kw, - ) - - @classmethod - def create_check_constraint( - cls, - operations: Operations, - constraint_name: Optional[str], - table_name: str, - condition: Union[str, ColumnElement[bool], TextClause], - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - """Issue a "create check constraint" instruction using the - current migration context. - - e.g.:: - - from alembic import op - from sqlalchemy.sql import column, func - - op.create_check_constraint( - "ck_user_name_len", - "user", - func.len(column("name")) > 5, - ) - - CHECK constraints are usually against a SQL expression, so ad-hoc - table metadata is usually needed. The function will convert the given - arguments into a :class:`sqlalchemy.schema.CheckConstraint` bound - to an anonymous table in order to emit the CREATE statement. - - :param name: Name of the check constraint. The name is necessary - so that an ALTER statement can be emitted. For setups that - use an automated naming scheme such as that described at - :ref:`sqla:constraint_naming_conventions`, - ``name`` here can be ``None``, as the event listener will - apply the name to the constraint object when it is associated - with the table. - :param table_name: String name of the source table. - :param condition: SQL expression that's the condition of the - constraint. Can be a string or SQLAlchemy expression language - structure. - :param deferrable: optional bool. If set, emit DEFERRABLE or - NOT DEFERRABLE when issuing DDL for this constraint. - :param initially: optional string. 
If set, emit INITIALLY - when issuing DDL for this constraint. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - op = cls(constraint_name, table_name, condition, schema=schema, **kw) - return operations.invoke(op) - - @classmethod - def batch_create_check_constraint( - cls, - operations: BatchOperations, - constraint_name: str, - condition: Union[str, ColumnElement[bool], TextClause], - **kw: Any, - ) -> None: - """Issue a "create check constraint" instruction using the - current batch migration context. - - The batch form of this call omits the ``source`` and ``schema`` - arguments from the call. - - .. seealso:: - - :meth:`.Operations.create_check_constraint` - - """ - op = cls( - constraint_name, - operations.impl.table_name, - condition, - schema=operations.impl.schema, - **kw, - ) - return operations.invoke(op) - - -@Operations.register_operation("create_index") -@BatchOperations.register_operation("create_index", "batch_create_index") -class CreateIndexOp(MigrateOperation): - """Represent a create index operation.""" - - def __init__( - self, - index_name: Optional[str], - table_name: str, - columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], - *, - schema: Optional[str] = None, - unique: bool = False, - if_not_exists: Optional[bool] = None, - **kw: Any, - ) -> None: - self.index_name = index_name - self.table_name = table_name - self.columns = columns - self.schema = schema - self.unique = unique - self.if_not_exists = if_not_exists - self.kw = kw - - def reverse(self) -> DropIndexOp: - return DropIndexOp.from_index(self.to_index()) - - def to_diff_tuple(self) -> Tuple[str, Index]: - return ("add_index", self.to_index()) - - @classmethod - def from_index(cls, index: Index) -> CreateIndexOp: - assert index.table is not None - return cls( - index.name, - index.table.name, - index.expressions, - schema=index.table.schema, - unique=index.unique, - **index.kwargs, - ) - - def to_index( - self, migration_context: Optional[MigrationContext] = None - ) -> Index: - schema_obj = schemaobj.SchemaObjects(migration_context) - - idx = schema_obj.index( - self.index_name, - self.table_name, - self.columns, - schema=self.schema, - unique=self.unique, - **self.kw, - ) - return idx - - @classmethod - def create_index( - cls, - operations: Operations, - index_name: Optional[str], - table_name: str, - columns: Sequence[Union[str, TextClause, Function[Any]]], - *, - schema: Optional[str] = None, - unique: bool = False, - if_not_exists: Optional[bool] = None, - **kw: Any, - ) -> None: - r"""Issue a "create index" instruction using the current - migration context. - - e.g.:: - - from alembic import op - - op.create_index("ik_test", "t1", ["foo", "bar"]) - - Functional indexes can be produced by using the - :func:`sqlalchemy.sql.expression.text` construct:: - - from alembic import op - from sqlalchemy import text - - op.create_index("ik_test", "t1", [text("lower(foo)")]) - - :param index_name: name of the index. - :param table_name: name of the owning table. - :param columns: a list consisting of string column names and/or - :func:`~sqlalchemy.sql.expression.text` constructs. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param unique: If True, create a unique index. 
- - :param quote: Force quoting of this column's name on or off, - corresponding to ``True`` or ``False``. When left at its default - of ``None``, the column identifier will be quoted according to - whether the name is case sensitive (identifiers with at least one - upper case character are treated as case sensitive), or if it's a - reserved word. This flag is only needed to force quoting of a - reserved word which is not known by the SQLAlchemy dialect. - - :param if_not_exists: If True, adds IF NOT EXISTS operator when - creating the new index. - - .. versionadded:: 1.12.0 - - :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. - - """ - op = cls( - index_name, - table_name, - columns, - schema=schema, - unique=unique, - if_not_exists=if_not_exists, - **kw, - ) - return operations.invoke(op) - - @classmethod - def batch_create_index( - cls, - operations: BatchOperations, - index_name: str, - columns: List[str], - **kw: Any, - ) -> None: - """Issue a "create index" instruction using the - current batch migration context. - - .. seealso:: - - :meth:`.Operations.create_index` - - """ - - op = cls( - index_name, - operations.impl.table_name, - columns, - schema=operations.impl.schema, - **kw, - ) - return operations.invoke(op) - - -@Operations.register_operation("drop_index") -@BatchOperations.register_operation("drop_index", "batch_drop_index") -class DropIndexOp(MigrateOperation): - """Represent a drop index operation.""" - - def __init__( - self, - index_name: Union[quoted_name, str, conv], - table_name: Optional[str] = None, - *, - schema: Optional[str] = None, - if_exists: Optional[bool] = None, - _reverse: Optional[CreateIndexOp] = None, - **kw: Any, - ) -> None: - self.index_name = index_name - self.table_name = table_name - self.schema = schema - self.if_exists = if_exists - self._reverse = _reverse - self.kw = kw - - def to_diff_tuple(self) -> Tuple[str, Index]: - return ("remove_index", self.to_index()) - - def reverse(self) -> CreateIndexOp: - return CreateIndexOp.from_index(self.to_index()) - - @classmethod - def from_index(cls, index: Index) -> DropIndexOp: - assert index.table is not None - return cls( - index.name, # type: ignore[arg-type] - table_name=index.table.name, - schema=index.table.schema, - _reverse=CreateIndexOp.from_index(index), - unique=index.unique, - **index.kwargs, - ) - - def to_index( - self, migration_context: Optional[MigrationContext] = None - ) -> Index: - schema_obj = schemaobj.SchemaObjects(migration_context) - - # need a dummy column name here since SQLAlchemy - # 0.7.6 and further raises on Index with no columns - return schema_obj.index( - self.index_name, - self.table_name, - self._reverse.columns if self._reverse else ["x"], - schema=self.schema, - **self.kw, - ) - - @classmethod - def drop_index( - cls, - operations: Operations, - index_name: str, - table_name: Optional[str] = None, - *, - schema: Optional[str] = None, - if_exists: Optional[bool] = None, - **kw: Any, - ) -> None: - r"""Issue a "drop index" instruction using the current - migration context. - - e.g.:: - - drop_index("accounts") - - :param index_name: name of the index. - :param table_name: name of the owning table. Some - backends such as Microsoft SQL Server require this. - :param schema: Optional schema name to operate within. 
To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - :param if_exists: If True, adds IF EXISTS operator when - dropping the index. - - .. versionadded:: 1.12.0 - - :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. - - """ - op = cls( - index_name, - table_name=table_name, - schema=schema, - if_exists=if_exists, - **kw, - ) - return operations.invoke(op) - - @classmethod - def batch_drop_index( - cls, operations: BatchOperations, index_name: str, **kw: Any - ) -> None: - """Issue a "drop index" instruction using the - current batch migration context. - - .. seealso:: - - :meth:`.Operations.drop_index` - - """ - - op = cls( - index_name, - table_name=operations.impl.table_name, - schema=operations.impl.schema, - **kw, - ) - return operations.invoke(op) - - -@Operations.register_operation("create_table") -class CreateTableOp(MigrateOperation): - """Represent a create table operation.""" - - def __init__( - self, - table_name: str, - columns: Sequence[SchemaItem], - *, - schema: Optional[str] = None, - if_not_exists: Optional[bool] = None, - _namespace_metadata: Optional[MetaData] = None, - _constraints_included: bool = False, - **kw: Any, - ) -> None: - self.table_name = table_name - self.columns = columns - self.schema = schema - self.if_not_exists = if_not_exists - self.info = kw.pop("info", {}) - self.comment = kw.pop("comment", None) - self.prefixes = kw.pop("prefixes", None) - self.kw = kw - self._namespace_metadata = _namespace_metadata - self._constraints_included = _constraints_included - - def reverse(self) -> DropTableOp: - return DropTableOp.from_table( - self.to_table(), _namespace_metadata=self._namespace_metadata - ) - - def to_diff_tuple(self) -> Tuple[str, Table]: - return ("add_table", self.to_table()) - - @classmethod - def from_table( - cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None - ) -> CreateTableOp: - if _namespace_metadata is None: - _namespace_metadata = table.metadata - - return cls( - table.name, - list(table.c) + list(table.constraints), - schema=table.schema, - _namespace_metadata=_namespace_metadata, - # given a Table() object, this Table will contain full Index() - # and UniqueConstraint objects already constructed in response to - # each unique=True / index=True flag on a Column. Carry this - # state along so that when we re-convert back into a Table, we - # skip unique=True/index=True so that these constraints are - # not doubled up. 
see #844 #848 - _constraints_included=True, - comment=table.comment, - info=dict(table.info), - prefixes=list(table._prefixes), - **table.kwargs, - ) - - def to_table( - self, migration_context: Optional[MigrationContext] = None - ) -> Table: - schema_obj = schemaobj.SchemaObjects(migration_context) - - return schema_obj.table( - self.table_name, - *self.columns, - schema=self.schema, - prefixes=list(self.prefixes) if self.prefixes else [], - comment=self.comment, - info=self.info.copy() if self.info else {}, - _constraints_included=self._constraints_included, - **self.kw, - ) - - @classmethod - def create_table( - cls, - operations: Operations, - table_name: str, - *columns: SchemaItem, - if_not_exists: Optional[bool] = None, - **kw: Any, - ) -> Table: - r"""Issue a "create table" instruction using the current migration - context. - - This directive receives an argument list similar to that of the - traditional :class:`sqlalchemy.schema.Table` construct, but without the - metadata:: - - from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column - from alembic import op - - op.create_table( - "account", - Column("id", INTEGER, primary_key=True), - Column("name", VARCHAR(50), nullable=False), - Column("description", NVARCHAR(200)), - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - Note that :meth:`.create_table` accepts - :class:`~sqlalchemy.schema.Column` - constructs directly from the SQLAlchemy library. In particular, - default values to be created on the database side are - specified using the ``server_default`` parameter, and not - ``default`` which only specifies Python-side defaults:: - - from alembic import op - from sqlalchemy import Column, TIMESTAMP, func - - # specify "DEFAULT NOW" along with the "timestamp" column - op.create_table( - "account", - Column("id", INTEGER, primary_key=True), - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - The function also returns a newly created - :class:`~sqlalchemy.schema.Table` object, corresponding to the table - specification given, which is suitable for - immediate SQL operations, in particular - :meth:`.Operations.bulk_insert`:: - - from sqlalchemy import INTEGER, VARCHAR, NVARCHAR, Column - from alembic import op - - account_table = op.create_table( - "account", - Column("id", INTEGER, primary_key=True), - Column("name", VARCHAR(50), nullable=False), - Column("description", NVARCHAR(200)), - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - op.bulk_insert( - account_table, - [ - {"name": "A1", "description": "account 1"}, - {"name": "A2", "description": "account 2"}, - ], - ) - - :param table_name: Name of the table - :param \*columns: collection of :class:`~sqlalchemy.schema.Column` - objects within - the table, as well as optional :class:`~sqlalchemy.schema.Constraint` - objects - and :class:`~.sqlalchemy.schema.Index` objects. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param if_not_exists: If True, adds IF NOT EXISTS operator when - creating the new table. - - .. versionadded:: 1.13.3 - :param \**kw: Other keyword arguments are passed to the underlying - :class:`sqlalchemy.schema.Table` object created for the command. - - :return: the :class:`~sqlalchemy.schema.Table` object corresponding - to the parameters given. 
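
The from_table()/reverse() round trip above is what lets autogenerate emit a matching drop_table() on the downgrade path. A standalone sketch of that mechanism; the table definition is hypothetical:

    import sqlalchemy as sa
    from alembic.operations import ops

    metadata = sa.MetaData()
    account = sa.Table(
        "account",
        metadata,
        sa.Column("id", sa.Integer, primary_key=True),
        sa.Column("name", sa.String(50), nullable=False),
    )

    create_op = ops.CreateTableOp.from_table(account)
    drop_op = create_op.reverse()       # a DropTableOp for "account"
    print(drop_op.to_diff_tuple()[0])   # "remove_table"
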
- - """ - op = cls(table_name, columns, if_not_exists=if_not_exists, **kw) - return operations.invoke(op) - - -@Operations.register_operation("drop_table") -class DropTableOp(MigrateOperation): - """Represent a drop table operation.""" - - def __init__( - self, - table_name: str, - *, - schema: Optional[str] = None, - if_exists: Optional[bool] = None, - table_kw: Optional[MutableMapping[Any, Any]] = None, - _reverse: Optional[CreateTableOp] = None, - ) -> None: - self.table_name = table_name - self.schema = schema - self.if_exists = if_exists - self.table_kw = table_kw or {} - self.comment = self.table_kw.pop("comment", None) - self.info = self.table_kw.pop("info", None) - self.prefixes = self.table_kw.pop("prefixes", None) - self._reverse = _reverse - - def to_diff_tuple(self) -> Tuple[str, Table]: - return ("remove_table", self.to_table()) - - def reverse(self) -> CreateTableOp: - return CreateTableOp.from_table(self.to_table()) - - @classmethod - def from_table( - cls, table: Table, *, _namespace_metadata: Optional[MetaData] = None - ) -> DropTableOp: - return cls( - table.name, - schema=table.schema, - table_kw={ - "comment": table.comment, - "info": dict(table.info), - "prefixes": list(table._prefixes), - **table.kwargs, - }, - _reverse=CreateTableOp.from_table( - table, _namespace_metadata=_namespace_metadata - ), - ) - - def to_table( - self, migration_context: Optional[MigrationContext] = None - ) -> Table: - if self._reverse: - cols_and_constraints = self._reverse.columns - else: - cols_and_constraints = [] - - schema_obj = schemaobj.SchemaObjects(migration_context) - t = schema_obj.table( - self.table_name, - *cols_and_constraints, - comment=self.comment, - info=self.info.copy() if self.info else {}, - prefixes=list(self.prefixes) if self.prefixes else [], - schema=self.schema, - _constraints_included=( - self._reverse._constraints_included if self._reverse else False - ), - **self.table_kw, - ) - return t - - @classmethod - def drop_table( - cls, - operations: Operations, - table_name: str, - *, - schema: Optional[str] = None, - if_exists: Optional[bool] = None, - **kw: Any, - ) -> None: - r"""Issue a "drop table" instruction using the current - migration context. - - - e.g.:: - - drop_table("accounts") - - :param table_name: Name of the table - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param if_exists: If True, adds IF EXISTS operator when - dropping the table. - - .. versionadded:: 1.13.3 - :param \**kw: Other keyword arguments are passed to the underlying - :class:`sqlalchemy.schema.Table` object created for the command. 
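
A minimal sketch of the directive; the table name is hypothetical, and if_exists assumes Alembic 1.13.3+ per the versionadded note above:

    from alembic import op

    def downgrade():
        # IF EXISTS keeps a re-run of this downgrade from failing
        op.drop_table("account", if_exists=True)
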
- - """ - op = cls(table_name, schema=schema, if_exists=if_exists, table_kw=kw) - operations.invoke(op) - - -class AlterTableOp(MigrateOperation): - """Represent an alter table operation.""" - - def __init__( - self, - table_name: str, - *, - schema: Optional[str] = None, - ) -> None: - self.table_name = table_name - self.schema = schema - - -@Operations.register_operation("rename_table") -class RenameTableOp(AlterTableOp): - """Represent a rename table operation.""" - - def __init__( - self, - old_table_name: str, - new_table_name: str, - *, - schema: Optional[str] = None, - ) -> None: - super().__init__(old_table_name, schema=schema) - self.new_table_name = new_table_name - - @classmethod - def rename_table( - cls, - operations: Operations, - old_table_name: str, - new_table_name: str, - *, - schema: Optional[str] = None, - ) -> None: - """Emit an ALTER TABLE to rename a table. - - :param old_table_name: old name. - :param new_table_name: new name. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - op = cls(old_table_name, new_table_name, schema=schema) - return operations.invoke(op) - - -@Operations.register_operation("create_table_comment") -@BatchOperations.register_operation( - "create_table_comment", "batch_create_table_comment" -) -class CreateTableCommentOp(AlterTableOp): - """Represent a COMMENT ON `table` operation.""" - - def __init__( - self, - table_name: str, - comment: Optional[str], - *, - schema: Optional[str] = None, - existing_comment: Optional[str] = None, - ) -> None: - self.table_name = table_name - self.comment = comment - self.existing_comment = existing_comment - self.schema = schema - - @classmethod - def create_table_comment( - cls, - operations: Operations, - table_name: str, - comment: Optional[str], - *, - existing_comment: Optional[str] = None, - schema: Optional[str] = None, - ) -> None: - """Emit a COMMENT ON operation to set the comment for a table. - - :param table_name: string name of the target table. - :param comment: string value of the comment being registered against - the specified table. - :param existing_comment: String value of a comment - already registered on the specified table, used within autogenerate - so that the operation is reversible, but not required for direct - use. - - .. seealso:: - - :meth:`.Operations.drop_table_comment` - - :paramref:`.Operations.alter_column.comment` - - """ - - op = cls( - table_name, - comment, - existing_comment=existing_comment, - schema=schema, - ) - return operations.invoke(op) - - @classmethod - def batch_create_table_comment( - cls, - operations: BatchOperations, - comment: Optional[str], - *, - existing_comment: Optional[str] = None, - ) -> None: - """Emit a COMMENT ON operation to set the comment for a table - using the current batch migration context. - - :param comment: string value of the comment being registered against - the specified table. - :param existing_comment: String value of a comment - already registered on the specified table, used within autogenerate - so that the operation is reversible, but not required for direct - use. 
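
A minimal sketch combining the two directives above in one migration; the names and comment text are hypothetical:

    from alembic import op

    def upgrade():
        op.rename_table("user", "account")
        # passing existing_comment=None records that no comment was
        # present before, keeping the operation reversible
        op.create_table_comment(
            "account",
            "login accounts",
            existing_comment=None,
        )
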
- - """ - - op = cls( - operations.impl.table_name, - comment, - existing_comment=existing_comment, - schema=operations.impl.schema, - ) - return operations.invoke(op) - - def reverse(self) -> Union[CreateTableCommentOp, DropTableCommentOp]: - """Reverses the COMMENT ON operation against a table.""" - if self.existing_comment is None: - return DropTableCommentOp( - self.table_name, - existing_comment=self.comment, - schema=self.schema, - ) - else: - return CreateTableCommentOp( - self.table_name, - self.existing_comment, - existing_comment=self.comment, - schema=self.schema, - ) - - def to_table( - self, migration_context: Optional[MigrationContext] = None - ) -> Table: - schema_obj = schemaobj.SchemaObjects(migration_context) - - return schema_obj.table( - self.table_name, schema=self.schema, comment=self.comment - ) - - def to_diff_tuple(self) -> Tuple[Any, ...]: - return ("add_table_comment", self.to_table(), self.existing_comment) - - -@Operations.register_operation("drop_table_comment") -@BatchOperations.register_operation( - "drop_table_comment", "batch_drop_table_comment" -) -class DropTableCommentOp(AlterTableOp): - """Represent an operation to remove the comment from a table.""" - - def __init__( - self, - table_name: str, - *, - schema: Optional[str] = None, - existing_comment: Optional[str] = None, - ) -> None: - self.table_name = table_name - self.existing_comment = existing_comment - self.schema = schema - - @classmethod - def drop_table_comment( - cls, - operations: Operations, - table_name: str, - *, - existing_comment: Optional[str] = None, - schema: Optional[str] = None, - ) -> None: - """Issue a "drop table comment" operation to - remove an existing comment set on a table. - - :param table_name: string name of the target table. - :param existing_comment: An optional string value of a comment already - registered on the specified table. - - .. seealso:: - - :meth:`.Operations.create_table_comment` - - :paramref:`.Operations.alter_column.comment` - - """ - - op = cls(table_name, existing_comment=existing_comment, schema=schema) - return operations.invoke(op) - - @classmethod - def batch_drop_table_comment( - cls, - operations: BatchOperations, - *, - existing_comment: Optional[str] = None, - ) -> None: - """Issue a "drop table comment" operation to - remove an existing comment set on a table using the current - batch operations context. - - :param existing_comment: An optional string value of a comment already - registered on the specified table. 
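
The branch in reverse() above means a comment that replaced an earlier one reverses to the earlier text, while a brand-new comment reverses to a plain drop. A standalone sketch, with hypothetical names:

    from alembic.operations import ops

    fresh = ops.CreateTableCommentOp("account", "login accounts")
    print(type(fresh.reverse()).__name__)   # DropTableCommentOp

    amended = ops.CreateTableCommentOp(
        "account",
        "active login accounts",
        existing_comment="login accounts",
    )
    print(amended.reverse().comment)        # login accounts
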
- - """ - - op = cls( - operations.impl.table_name, - existing_comment=existing_comment, - schema=operations.impl.schema, - ) - return operations.invoke(op) - - def reverse(self) -> CreateTableCommentOp: - """Reverses the COMMENT ON operation against a table.""" - return CreateTableCommentOp( - self.table_name, self.existing_comment, schema=self.schema - ) - - def to_table( - self, migration_context: Optional[MigrationContext] = None - ) -> Table: - schema_obj = schemaobj.SchemaObjects(migration_context) - - return schema_obj.table(self.table_name, schema=self.schema) - - def to_diff_tuple(self) -> Tuple[Any, ...]: - return ("remove_table_comment", self.to_table()) - - -@Operations.register_operation("alter_column") -@BatchOperations.register_operation("alter_column", "batch_alter_column") -class AlterColumnOp(AlterTableOp): - """Represent an alter column operation.""" - - def __init__( - self, - table_name: str, - column_name: str, - *, - schema: Optional[str] = None, - existing_type: Optional[Any] = None, - existing_server_default: Any = False, - existing_nullable: Optional[bool] = None, - existing_comment: Optional[str] = None, - modify_nullable: Optional[bool] = None, - modify_comment: Optional[Union[str, Literal[False]]] = False, - modify_server_default: Any = False, - modify_name: Optional[str] = None, - modify_type: Optional[Any] = None, - **kw: Any, - ) -> None: - super().__init__(table_name, schema=schema) - self.column_name = column_name - self.existing_type = existing_type - self.existing_server_default = existing_server_default - self.existing_nullable = existing_nullable - self.existing_comment = existing_comment - self.modify_nullable = modify_nullable - self.modify_comment = modify_comment - self.modify_server_default = modify_server_default - self.modify_name = modify_name - self.modify_type = modify_type - self.kw = kw - - def to_diff_tuple(self) -> Any: - col_diff = [] - schema, tname, cname = self.schema, self.table_name, self.column_name - - if self.modify_type is not None: - col_diff.append( - ( - "modify_type", - schema, - tname, - cname, - { - "existing_nullable": self.existing_nullable, - "existing_server_default": ( - self.existing_server_default - ), - "existing_comment": self.existing_comment, - }, - self.existing_type, - self.modify_type, - ) - ) - - if self.modify_nullable is not None: - col_diff.append( - ( - "modify_nullable", - schema, - tname, - cname, - { - "existing_type": self.existing_type, - "existing_server_default": ( - self.existing_server_default - ), - "existing_comment": self.existing_comment, - }, - self.existing_nullable, - self.modify_nullable, - ) - ) - - if self.modify_server_default is not False: - col_diff.append( - ( - "modify_default", - schema, - tname, - cname, - { - "existing_nullable": self.existing_nullable, - "existing_type": self.existing_type, - "existing_comment": self.existing_comment, - }, - self.existing_server_default, - self.modify_server_default, - ) - ) - - if self.modify_comment is not False: - col_diff.append( - ( - "modify_comment", - schema, - tname, - cname, - { - "existing_nullable": self.existing_nullable, - "existing_type": self.existing_type, - "existing_server_default": ( - self.existing_server_default - ), - }, - self.existing_comment, - self.modify_comment, - ) - ) - - return col_diff - - def has_changes(self) -> bool: - hc1 = ( - self.modify_nullable is not None - or self.modify_server_default is not False - or self.modify_type is not None - or self.modify_comment is not False - ) - if hc1: - return True - 
for kw in self.kw: - if kw.startswith("modify_"): - return True - else: - return False - - def reverse(self) -> AlterColumnOp: - kw = self.kw.copy() - kw["existing_type"] = self.existing_type - kw["existing_nullable"] = self.existing_nullable - kw["existing_server_default"] = self.existing_server_default - kw["existing_comment"] = self.existing_comment - if self.modify_type is not None: - kw["modify_type"] = self.modify_type - if self.modify_nullable is not None: - kw["modify_nullable"] = self.modify_nullable - if self.modify_server_default is not False: - kw["modify_server_default"] = self.modify_server_default - if self.modify_comment is not False: - kw["modify_comment"] = self.modify_comment - - # TODO: make this a little simpler - all_keys = { - m.group(1) - for m in [re.match(r"^(?:existing_|modify_)(.+)$", k) for k in kw] - if m - } - - for k in all_keys: - if "modify_%s" % k in kw: - swap = kw["existing_%s" % k] - kw["existing_%s" % k] = kw["modify_%s" % k] - kw["modify_%s" % k] = swap - - return self.__class__( - self.table_name, self.column_name, schema=self.schema, **kw - ) - - @classmethod - def alter_column( - cls, - operations: Operations, - table_name: str, - column_name: str, - *, - nullable: Optional[bool] = None, - comment: Optional[Union[str, Literal[False]]] = False, - server_default: Any = False, - new_column_name: Optional[str] = None, - type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None, - existing_type: Optional[ - Union[TypeEngine[Any], Type[TypeEngine[Any]]] - ] = None, - existing_server_default: Optional[ - Union[str, bool, Identity, Computed] - ] = False, - existing_nullable: Optional[bool] = None, - existing_comment: Optional[str] = None, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - r"""Issue an "alter column" instruction using the - current migration context. - - Generally, only that aspect of the column which - is being changed, i.e. name, type, nullability, - default, needs to be specified. Multiple changes - can also be specified at once and the backend should - "do the right thing", emitting each change either - separately or together as the backend allows. - - MySQL has special requirements here, since MySQL - cannot ALTER a column without a full specification. - When producing MySQL-compatible migration files, - it is recommended that the ``existing_type``, - ``existing_server_default``, and ``existing_nullable`` - parameters be present, if not being altered. - - Type changes which are against the SQLAlchemy - "schema" types :class:`~sqlalchemy.types.Boolean` - and :class:`~sqlalchemy.types.Enum` may also - add or drop constraints which accompany those - types on backends that don't support them natively. - The ``existing_type`` argument is - used in this case to identify and remove a previous - constraint that was bound to the type object. - - :param table_name: string name of the target table. - :param column_name: string name of the target column, - as it exists before the operation begins. - :param nullable: Optional; specify ``True`` or ``False`` - to alter the column's nullability. - :param server_default: Optional; specify a string - SQL expression, :func:`~sqlalchemy.sql.expression.text`, - or :class:`~sqlalchemy.schema.DefaultClause` to indicate - an alteration to the column's default value. - Set to ``None`` to have the default removed. - :param comment: optional string text of a new comment to add to the - column. 
- :param new_column_name: Optional; specify a string name here to - indicate the new name within a column rename operation. - :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` - type object to specify a change to the column's type. - For SQLAlchemy types that also indicate a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), - the constraint is also generated. - :param autoincrement: set the ``AUTO_INCREMENT`` flag of the column; - currently understood by the MySQL dialect. - :param existing_type: Optional; a - :class:`~sqlalchemy.types.TypeEngine` - type object to specify the previous type. This - is required for all MySQL column alter operations that - don't otherwise specify a new type, as well as for - when nullability is being changed on a SQL Server - column. It is also used if the type is a so-called - SQLAlchemy "schema" type which may define a constraint (i.e. - :class:`~sqlalchemy.types.Boolean`, - :class:`~sqlalchemy.types.Enum`), - so that the constraint can be dropped. - :param existing_server_default: Optional; The existing - default value of the column. Required on MySQL if - an existing default is not being changed; else MySQL - removes the default. - :param existing_nullable: Optional; the existing nullability - of the column. Required on MySQL if the existing nullability - is not being changed; else MySQL sets this to NULL. - :param existing_autoincrement: Optional; the existing autoincrement - of the column. Used for MySQL's system of altering a column - that specifies ``AUTO_INCREMENT``. - :param existing_comment: string text of the existing comment on the - column to be maintained. Required on MySQL if the existing comment - on the column is not being changed. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param postgresql_using: String argument which will indicate a - SQL expression to render within the Postgresql-specific USING clause - within ALTER COLUMN. This string is taken directly as raw SQL which - must explicitly include any necessary quoting or escaping of tokens - within the expression. - - """ - - alt = cls( - table_name, - column_name, - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_comment=existing_comment, - modify_name=new_column_name, - modify_type=type_, - modify_server_default=server_default, - modify_nullable=nullable, - modify_comment=comment, - **kw, - ) - - return operations.invoke(alt) - - @classmethod - def batch_alter_column( - cls, - operations: BatchOperations, - column_name: str, - *, - nullable: Optional[bool] = None, - comment: Optional[Union[str, Literal[False]]] = False, - server_default: Any = False, - new_column_name: Optional[str] = None, - type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = None, - existing_type: Optional[ - Union[TypeEngine[Any], Type[TypeEngine[Any]]] - ] = None, - existing_server_default: Optional[ - Union[str, bool, Identity, Computed] - ] = False, - existing_nullable: Optional[bool] = None, - existing_comment: Optional[str] = None, - insert_before: Optional[str] = None, - insert_after: Optional[str] = None, - **kw: Any, - ) -> None: - """Issue an "alter column" instruction using the current - batch migration context. 
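
Unlike most directives in this module, alter_column's docstring carries no e.g. block; a minimal sketch of the MySQL-oriented usage it describes, with hypothetical names:

    import sqlalchemy as sa
    from alembic import op

    def upgrade():
        # MySQL requires the full existing specification even when only
        # renaming, hence the existing_* parameters
        op.alter_column(
            "account",
            "name",
            new_column_name="full_name",
            existing_type=sa.String(50),
            existing_nullable=False,
        )
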
- - Parameters are the same as that of :meth:`.Operations.alter_column`, - as well as the following option(s): - - :param insert_before: String name of an existing column which this - column should be placed before, when creating the new table. - - :param insert_after: String name of an existing column which this - column should be placed after, when creating the new table. If - both :paramref:`.BatchOperations.alter_column.insert_before` - and :paramref:`.BatchOperations.alter_column.insert_after` are - omitted, the column is inserted after the last existing column - in the table. - - .. seealso:: - - :meth:`.Operations.alter_column` - - - """ - alt = cls( - operations.impl.table_name, - column_name, - schema=operations.impl.schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - existing_comment=existing_comment, - modify_name=new_column_name, - modify_type=type_, - modify_server_default=server_default, - modify_nullable=nullable, - modify_comment=comment, - insert_before=insert_before, - insert_after=insert_after, - **kw, - ) - - return operations.invoke(alt) - - -@Operations.register_operation("add_column") -@BatchOperations.register_operation("add_column", "batch_add_column") -class AddColumnOp(AlterTableOp): - """Represent an add column operation.""" - - def __init__( - self, - table_name: str, - column: Column[Any], - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - super().__init__(table_name, schema=schema) - self.column = column - self.kw = kw - - def reverse(self) -> DropColumnOp: - return DropColumnOp.from_column_and_tablename( - self.schema, self.table_name, self.column - ) - - def to_diff_tuple( - self, - ) -> Tuple[str, Optional[str], str, Column[Any]]: - return ("add_column", self.schema, self.table_name, self.column) - - def to_column(self) -> Column[Any]: - return self.column - - @classmethod - def from_column(cls, col: Column[Any]) -> AddColumnOp: - return cls(col.table.name, col, schema=col.table.schema) - - @classmethod - def from_column_and_tablename( - cls, - schema: Optional[str], - tname: str, - col: Column[Any], - ) -> AddColumnOp: - return cls(tname, col, schema=schema) - - @classmethod - def add_column( - cls, - operations: Operations, - table_name: str, - column: Column[Any], - *, - schema: Optional[str] = None, - ) -> None: - """Issue an "add column" instruction using the current - migration context. - - e.g.:: - - from alembic import op - from sqlalchemy import Column, String - - op.add_column("organization", Column("name", String())) - - The :meth:`.Operations.add_column` method typically corresponds - to the SQL command "ALTER TABLE... ADD COLUMN". Within the scope - of this command, the column's name, datatype, nullability, - and optional server-generated defaults may be indicated. - - .. note:: - - With the exception of NOT NULL constraints or single-column FOREIGN - KEY constraints, other kinds of constraints such as PRIMARY KEY, - UNIQUE or CHECK constraints **cannot** be generated using this - method; for these constraints, refer to operations such as - :meth:`.Operations.create_primary_key` and - :meth:`.Operations.create_check_constraint`. 
In particular, the - following :class:`~sqlalchemy.schema.Column` parameters are - **ignored**: - - * :paramref:`~sqlalchemy.schema.Column.primary_key` - SQL databases - typically do not support an ALTER operation that can add - individual columns one at a time to an existing primary key - constraint, therefore it's less ambiguous to use the - :meth:`.Operations.create_primary_key` method, which assumes no - existing primary key constraint is present. - * :paramref:`~sqlalchemy.schema.Column.unique` - use the - :meth:`.Operations.create_unique_constraint` method - * :paramref:`~sqlalchemy.schema.Column.index` - use the - :meth:`.Operations.create_index` method - - - The provided :class:`~sqlalchemy.schema.Column` object may include a - :class:`~sqlalchemy.schema.ForeignKey` constraint directive, - referencing a remote table name. For this specific type of constraint, - Alembic will automatically emit a second ALTER statement in order to - add the single-column FOREIGN KEY constraint separately:: - - from alembic import op - from sqlalchemy import Column, INTEGER, ForeignKey - - op.add_column( - "organization", - Column("account_id", INTEGER, ForeignKey("accounts.id")), - ) - - The column argument passed to :meth:`.Operations.add_column` is a - :class:`~sqlalchemy.schema.Column` construct, used in the same way it's - used in SQLAlchemy. In particular, values or functions to be indicated - as producing the column's default value on the database side are - specified using the ``server_default`` parameter, and not ``default`` - which only specifies Python-side defaults:: - - from alembic import op - from sqlalchemy import Column, TIMESTAMP, func - - # specify "DEFAULT NOW" along with the column add - op.add_column( - "account", - Column("timestamp", TIMESTAMP, server_default=func.now()), - ) - - :param table_name: String name of the parent table. - :param column: a :class:`sqlalchemy.schema.Column` object - representing the new column. - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - - """ - - op = cls(table_name, column, schema=schema) - return operations.invoke(op) - - @classmethod - def batch_add_column( - cls, - operations: BatchOperations, - column: Column[Any], - *, - insert_before: Optional[str] = None, - insert_after: Optional[str] = None, - ) -> None: - """Issue an "add column" instruction using the current - batch migration context. - - .. 
seealso:: - - :meth:`.Operations.add_column` - - """ - - kw = {} - if insert_before: - kw["insert_before"] = insert_before - if insert_after: - kw["insert_after"] = insert_after - - op = cls( - operations.impl.table_name, - column, - schema=operations.impl.schema, - **kw, - ) - return operations.invoke(op) - - -@Operations.register_operation("drop_column") -@BatchOperations.register_operation("drop_column", "batch_drop_column") -class DropColumnOp(AlterTableOp): - """Represent a drop column operation.""" - - def __init__( - self, - table_name: str, - column_name: str, - *, - schema: Optional[str] = None, - _reverse: Optional[AddColumnOp] = None, - **kw: Any, - ) -> None: - super().__init__(table_name, schema=schema) - self.column_name = column_name - self.kw = kw - self._reverse = _reverse - - def to_diff_tuple( - self, - ) -> Tuple[str, Optional[str], str, Column[Any]]: - return ( - "remove_column", - self.schema, - self.table_name, - self.to_column(), - ) - - def reverse(self) -> AddColumnOp: - if self._reverse is None: - raise ValueError( - "operation is not reversible; " - "original column is not present" - ) - - return AddColumnOp.from_column_and_tablename( - self.schema, self.table_name, self._reverse.column - ) - - @classmethod - def from_column_and_tablename( - cls, - schema: Optional[str], - tname: str, - col: Column[Any], - ) -> DropColumnOp: - return cls( - tname, - col.name, - schema=schema, - _reverse=AddColumnOp.from_column_and_tablename(schema, tname, col), - ) - - def to_column( - self, migration_context: Optional[MigrationContext] = None - ) -> Column[Any]: - if self._reverse is not None: - return self._reverse.column - schema_obj = schemaobj.SchemaObjects(migration_context) - return schema_obj.column(self.column_name, NULLTYPE) - - @classmethod - def drop_column( - cls, - operations: Operations, - table_name: str, - column_name: str, - *, - schema: Optional[str] = None, - **kw: Any, - ) -> None: - """Issue a "drop column" instruction using the current - migration context. - - e.g.:: - - drop_column("organization", "account_id") - - :param table_name: name of table - :param column_name: name of column - :param schema: Optional schema name to operate within. To control - quoting of the schema outside of the default behavior, use - the SQLAlchemy construct - :class:`~sqlalchemy.sql.elements.quoted_name`. - :param mssql_drop_check: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the CHECK constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.check_constraints, - then exec's a separate DROP CONSTRAINT for that constraint. - :param mssql_drop_default: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop the DEFAULT constraint on the column using a - SQL-script-compatible - block that selects into a @variable from sys.default_constraints, - then exec's a separate DROP CONSTRAINT for that default. - :param mssql_drop_foreign_key: Optional boolean. When ``True``, on - Microsoft SQL Server only, first - drop a single FOREIGN KEY constraint on the column using a - SQL-script-compatible - block that selects into a @variable from - sys.foreign_keys/sys.foreign_key_columns, - then exec's a separate DROP CONSTRAINT for that default. Only - works if the column has exactly one FK constraint which refers to - it, at the moment. 
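As an illustrative sketch (table and column names hypothetical), dropping a SQL Server column that carries a DEFAULT constraint could combine these flags::

    from alembic import op

    # SQL Server refuses to drop a column while a DEFAULT constraint
    # references it; mssql_drop_default=True emits the extra
    # DROP CONSTRAINT step first
    op.drop_column(
        "account",
        "created_at",
        mssql_drop_default=True,
    )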
- - """ - - op = cls(table_name, column_name, schema=schema, **kw) - return operations.invoke(op) - - @classmethod - def batch_drop_column( - cls, operations: BatchOperations, column_name: str, **kw: Any - ) -> None: - """Issue a "drop column" instruction using the current - batch migration context. - - .. seealso:: - - :meth:`.Operations.drop_column` - - """ - op = cls( - operations.impl.table_name, - column_name, - schema=operations.impl.schema, - **kw, - ) - return operations.invoke(op) - - -@Operations.register_operation("bulk_insert") -class BulkInsertOp(MigrateOperation): - """Represent a bulk insert operation.""" - - def __init__( - self, - table: Union[Table, TableClause], - rows: List[Dict[str, Any]], - *, - multiinsert: bool = True, - ) -> None: - self.table = table - self.rows = rows - self.multiinsert = multiinsert - - @classmethod - def bulk_insert( - cls, - operations: Operations, - table: Union[Table, TableClause], - rows: List[Dict[str, Any]], - *, - multiinsert: bool = True, - ) -> None: - """Issue a "bulk insert" operation using the current - migration context. - - This provides a means of representing an INSERT of multiple rows - which works equally well in the context of executing on a live - connection as well as that of generating a SQL script. In the - case of a SQL script, the values are rendered inline into the - statement. - - e.g.:: - - from alembic import op - from datetime import date - from sqlalchemy.sql import table, column - from sqlalchemy import String, Integer, Date - - # Create an ad-hoc table to use for the insert statement. - accounts_table = table( - "account", - column("id", Integer), - column("name", String), - column("create_date", Date), - ) - - op.bulk_insert( - accounts_table, - [ - { - "id": 1, - "name": "John Smith", - "create_date": date(2010, 10, 5), - }, - { - "id": 2, - "name": "Ed Williams", - "create_date": date(2007, 5, 27), - }, - { - "id": 3, - "name": "Wendy Jones", - "create_date": date(2008, 8, 15), - }, - ], - ) - - When using --sql mode, some datatypes may not render inline - automatically, such as dates and other special types. When this - issue is present, :meth:`.Operations.inline_literal` may be used:: - - op.bulk_insert( - accounts_table, - [ - { - "id": 1, - "name": "John Smith", - "create_date": op.inline_literal("2010-10-05"), - }, - { - "id": 2, - "name": "Ed Williams", - "create_date": op.inline_literal("2007-05-27"), - }, - { - "id": 3, - "name": "Wendy Jones", - "create_date": op.inline_literal("2008-08-15"), - }, - ], - multiinsert=False, - ) - - When using :meth:`.Operations.inline_literal` in conjunction with - :meth:`.Operations.bulk_insert`, in order for the statement to work - in "online" (e.g. non --sql) mode, the - :paramref:`~.Operations.bulk_insert.multiinsert` - flag should be set to ``False``, which will have the effect of - individual INSERT statements being emitted to the database, each - with a distinct VALUES clause, so that the "inline" values can - still be rendered, rather than attempting to pass the values - as bound parameters. - - :param table: a table object which represents the target of the INSERT. - - :param rows: a list of dictionaries indicating rows. - - :param multiinsert: when at its default of True and --sql mode is not - enabled, the INSERT statement will be executed using - "executemany()" style, where all elements in the list of - dictionaries are passed as bound parameters in a single - list. 
Setting this to False results in individual INSERT - statements being emitted per parameter set, and is needed - in those cases where non-literal values are present in the - parameter sets. - - """ - - op = cls(table, rows, multiinsert=multiinsert) - operations.invoke(op) - - -@Operations.register_operation("execute") -@BatchOperations.register_operation("execute", "batch_execute") -class ExecuteSQLOp(MigrateOperation): - """Represent an execute SQL operation.""" - - def __init__( - self, - sqltext: Union[Executable, str], - *, - execution_options: Optional[dict[str, Any]] = None, - ) -> None: - self.sqltext = sqltext - self.execution_options = execution_options - - @classmethod - def execute( - cls, - operations: Operations, - sqltext: Union[Executable, str], - *, - execution_options: Optional[dict[str, Any]] = None, - ) -> None: - r"""Execute the given SQL using the current migration context. - - The given SQL can be a plain string, e.g.:: - - op.execute("INSERT INTO table (foo) VALUES ('some value')") - - Or it can be any kind of Core SQL Expression construct, such as - below where we use an update construct:: - - from sqlalchemy.sql import table, column - from sqlalchemy import String - from alembic import op - - account = table("account", column("name", String)) - op.execute( - account.update() - .where(account.c.name == op.inline_literal("account 1")) - .values({"name": op.inline_literal("account 2")}) - ) - - Above, we made use of the SQLAlchemy - :func:`sqlalchemy.sql.expression.table` and - :func:`sqlalchemy.sql.expression.column` constructs to make a brief, - ad-hoc table construct just for our UPDATE statement. A full - :class:`~sqlalchemy.schema.Table` construct of course works perfectly - fine as well, though note it's a recommended practice to at least - ensure the definition of a table is self-contained within the migration - script, rather than imported from a module that may break compatibility - with older migrations. - - In a SQL script context, the statement is emitted directly to the - output stream. There is *no* return result, however, as this - function is oriented towards generating a change script - that can run in "offline" mode. Additionally, parameterized - statements are discouraged here, as they *will not work* in offline - mode. Above, we use :meth:`.inline_literal` where parameters are - to be used. - - For full interaction with a connected database where parameters can - also be used normally, use the "bind" available from the context:: - - from alembic import op - - connection = op.get_bind() - - connection.execute( - account.update() - .where(account.c.name == "account 1") - .values({"name": "account 2"}) - ) - - Additionally, when passing the statement as a plain string, it is first - coerced into a :func:`sqlalchemy.sql.expression.text` construct - before being passed along. In the less likely case that the - literal SQL string contains a colon, it must be escaped with a - backslash, as:: - - op.execute(r"INSERT INTO table (foo) VALUES ('\:colon_value')") - - - :param sqltext: Any legal SQLAlchemy expression, including: - - * a string - * a :func:`sqlalchemy.sql.expression.text` construct. - * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update` construct. - * a :func:`sqlalchemy.sql.expression.delete` construct. - * Any "executable" described in SQLAlchemy Core documentation, - noting that no result set is returned. - - .. 
note:: when passing a plain string, the statement is coerced into - a :func:`sqlalchemy.sql.expression.text` construct. This construct - considers symbols with colons, e.g. ``:foo`` to be bound parameters. - To avoid this, ensure that colon symbols are escaped, e.g. - ``\:foo``. - - :param execution_options: Optional dictionary of - execution options, will be passed to - :meth:`sqlalchemy.engine.Connection.execution_options`. - """ - op = cls(sqltext, execution_options=execution_options) - return operations.invoke(op) - - @classmethod - def batch_execute( - cls, - operations: Operations, - sqltext: Union[Executable, str], - *, - execution_options: Optional[dict[str, Any]] = None, - ) -> None: - """Execute the given SQL using the current migration context. - - .. seealso:: - - :meth:`.Operations.execute` - - """ - return cls.execute( - operations, sqltext, execution_options=execution_options - ) - - def to_diff_tuple(self) -> Tuple[str, Union[Executable, str]]: - return ("execute", self.sqltext) - - -class OpContainer(MigrateOperation): - """Represent a sequence of operations operation.""" - - def __init__(self, ops: Sequence[MigrateOperation] = ()) -> None: - self.ops = list(ops) - - def is_empty(self) -> bool: - return not self.ops - - def as_diffs(self) -> Any: - return list(OpContainer._ops_as_diffs(self)) - - @classmethod - def _ops_as_diffs( - cls, migrations: OpContainer - ) -> Iterator[Tuple[Any, ...]]: - for op in migrations.ops: - if hasattr(op, "ops"): - yield from cls._ops_as_diffs(cast("OpContainer", op)) - else: - yield op.to_diff_tuple() - - -class ModifyTableOps(OpContainer): - """Contains a sequence of operations that all apply to a single Table.""" - - def __init__( - self, - table_name: str, - ops: Sequence[MigrateOperation], - *, - schema: Optional[str] = None, - ) -> None: - super().__init__(ops) - self.table_name = table_name - self.schema = schema - - def reverse(self) -> ModifyTableOps: - return ModifyTableOps( - self.table_name, - ops=list(reversed([op.reverse() for op in self.ops])), - schema=self.schema, - ) - - -class UpgradeOps(OpContainer): - """contains a sequence of operations that would apply to the - 'upgrade' stream of a script. - - .. seealso:: - - :ref:`customizing_revision` - - """ - - def __init__( - self, - ops: Sequence[MigrateOperation] = (), - upgrade_token: str = "upgrades", - ) -> None: - super().__init__(ops=ops) - self.upgrade_token = upgrade_token - - def reverse_into(self, downgrade_ops: DowngradeOps) -> DowngradeOps: - downgrade_ops.ops[:] = list( - reversed([op.reverse() for op in self.ops]) - ) - return downgrade_ops - - def reverse(self) -> DowngradeOps: - return self.reverse_into(DowngradeOps(ops=[])) - - -class DowngradeOps(OpContainer): - """contains a sequence of operations that would apply to the - 'downgrade' stream of a script. - - .. seealso:: - - :ref:`customizing_revision` - - """ - - def __init__( - self, - ops: Sequence[MigrateOperation] = (), - downgrade_token: str = "downgrades", - ) -> None: - super().__init__(ops=ops) - self.downgrade_token = downgrade_token - - def reverse(self) -> UpgradeOps: - return UpgradeOps( - ops=list(reversed([op.reverse() for op in self.ops])) - ) - - -class MigrationScript(MigrateOperation): - """represents a migration script. - - E.g. when autogenerate encounters this object, this corresponds to the - production of an actual script file. - - A normal :class:`.MigrationScript` object would contain a single - :class:`.UpgradeOps` and a single :class:`.DowngradeOps` directive. 
- These are accessible via the ``.upgrade_ops`` and ``.downgrade_ops`` - attributes. - - In the case of an autogenerate operation that runs multiple times, - such as the multiple database example in the "multidb" template, - the ``.upgrade_ops`` and ``.downgrade_ops`` attributes are disabled, - and instead these objects should be accessed via the ``.upgrade_ops_list`` - and ``.downgrade_ops_list`` list-based attributes. These latter - attributes are always available at the very least as single-element lists. - - .. seealso:: - - :ref:`customizing_revision` - - """ - - _needs_render: Optional[bool] - _upgrade_ops: List[UpgradeOps] - _downgrade_ops: List[DowngradeOps] - - def __init__( - self, - rev_id: Optional[str], - upgrade_ops: UpgradeOps, - downgrade_ops: DowngradeOps, - *, - message: Optional[str] = None, - imports: Set[str] = set(), - head: Optional[str] = None, - splice: Optional[bool] = None, - branch_label: Optional[_RevIdType] = None, - version_path: Optional[str] = None, - depends_on: Optional[_RevIdType] = None, - ) -> None: - self.rev_id = rev_id - self.message = message - self.imports = imports - self.head = head - self.splice = splice - self.branch_label = branch_label - self.version_path = version_path - self.depends_on = depends_on - self.upgrade_ops = upgrade_ops - self.downgrade_ops = downgrade_ops - - @property - def upgrade_ops(self) -> Optional[UpgradeOps]: - """An instance of :class:`.UpgradeOps`. - - .. seealso:: - - :attr:`.MigrationScript.upgrade_ops_list` - """ - if len(self._upgrade_ops) > 1: - raise ValueError( - "This MigrationScript instance has a multiple-entry " - "list for UpgradeOps; please use the " - "upgrade_ops_list attribute." - ) - elif not self._upgrade_ops: - return None - else: - return self._upgrade_ops[0] - - @upgrade_ops.setter - def upgrade_ops( - self, upgrade_ops: Union[UpgradeOps, List[UpgradeOps]] - ) -> None: - self._upgrade_ops = util.to_list(upgrade_ops) - for elem in self._upgrade_ops: - assert isinstance(elem, UpgradeOps) - - @property - def downgrade_ops(self) -> Optional[DowngradeOps]: - """An instance of :class:`.DowngradeOps`. - - .. seealso:: - - :attr:`.MigrationScript.downgrade_ops_list` - """ - if len(self._downgrade_ops) > 1: - raise ValueError( - "This MigrationScript instance has a multiple-entry " - "list for DowngradeOps; please use the " - "downgrade_ops_list attribute." - ) - elif not self._downgrade_ops: - return None - else: - return self._downgrade_ops[0] - - @downgrade_ops.setter - def downgrade_ops( - self, downgrade_ops: Union[DowngradeOps, List[DowngradeOps]] - ) -> None: - self._downgrade_ops = util.to_list(downgrade_ops) - for elem in self._downgrade_ops: - assert isinstance(elem, DowngradeOps) - - @property - def upgrade_ops_list(self) -> List[UpgradeOps]: - """A list of :class:`.UpgradeOps` instances. - - This is used in place of the :attr:`.MigrationScript.upgrade_ops` - attribute when dealing with a revision operation that does - multiple autogenerate passes. - - """ - return self._upgrade_ops - - @property - def downgrade_ops_list(self) -> List[DowngradeOps]: - """A list of :class:`.DowngradeOps` instances. - - This is used in place of the :attr:`.MigrationScript.downgrade_ops` - attribute when dealing with a revision operation that does - multiple autogenerate passes. 
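A minimal sketch of this reversal behavior, assuming an :class:`.UpgradeOps` that holds a single add-column operation (table and column names hypothetical)::

    from sqlalchemy import Column, Integer
    from alembic.operations import ops

    upgrade_ops = ops.UpgradeOps(
        ops=[ops.AddColumnOp("account", Column("points", Integer()))]
    )

    # reverse() inverts each operation and reverses their order; the
    # AddColumnOp above becomes a DropColumnOp for "account.points"
    downgrade_ops = upgrade_ops.reverse()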
- - """ - return self._downgrade_ops diff --git a/myenv/Lib/site-packages/alembic/operations/schemaobj.py b/myenv/Lib/site-packages/alembic/operations/schemaobj.py deleted file mode 100644 index 59c1002..0000000 --- a/myenv/Lib/site-packages/alembic/operations/schemaobj.py +++ /dev/null @@ -1,290 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -from typing import Any -from typing import Dict -from typing import List -from typing import Optional -from typing import Sequence -from typing import Tuple -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import schema as sa_schema -from sqlalchemy.sql.schema import Column -from sqlalchemy.sql.schema import Constraint -from sqlalchemy.sql.schema import Index -from sqlalchemy.types import Integer -from sqlalchemy.types import NULLTYPE - -from .. import util -from ..util import sqla_compat - -if TYPE_CHECKING: - from sqlalchemy.sql.elements import ColumnElement - from sqlalchemy.sql.elements import TextClause - from sqlalchemy.sql.schema import CheckConstraint - from sqlalchemy.sql.schema import ForeignKey - from sqlalchemy.sql.schema import ForeignKeyConstraint - from sqlalchemy.sql.schema import MetaData - from sqlalchemy.sql.schema import PrimaryKeyConstraint - from sqlalchemy.sql.schema import Table - from sqlalchemy.sql.schema import UniqueConstraint - from sqlalchemy.sql.type_api import TypeEngine - - from ..runtime.migration import MigrationContext - - -class SchemaObjects: - def __init__( - self, migration_context: Optional[MigrationContext] = None - ) -> None: - self.migration_context = migration_context - - def primary_key_constraint( - self, - name: Optional[sqla_compat._ConstraintNameDefined], - table_name: str, - cols: Sequence[str], - schema: Optional[str] = None, - **dialect_kw, - ) -> PrimaryKeyConstraint: - m = self.metadata() - columns = [sa_schema.Column(n, NULLTYPE) for n in cols] - t = sa_schema.Table(table_name, m, *columns, schema=schema) - # SQLAlchemy primary key constraint name arg is wrongly typed on - # the SQLAlchemy side through 2.0.5 at least - p = sa_schema.PrimaryKeyConstraint( - *[t.c[n] for n in cols], name=name, **dialect_kw # type: ignore - ) - return p - - def foreign_key_constraint( - self, - name: Optional[sqla_compat._ConstraintNameDefined], - source: str, - referent: str, - local_cols: List[str], - remote_cols: List[str], - onupdate: Optional[str] = None, - ondelete: Optional[str] = None, - deferrable: Optional[bool] = None, - source_schema: Optional[str] = None, - referent_schema: Optional[str] = None, - initially: Optional[str] = None, - match: Optional[str] = None, - **dialect_kw, - ) -> ForeignKeyConstraint: - m = self.metadata() - if source == referent and source_schema == referent_schema: - t1_cols = local_cols + remote_cols - else: - t1_cols = local_cols - sa_schema.Table( - referent, - m, - *[sa_schema.Column(n, NULLTYPE) for n in remote_cols], - schema=referent_schema, - ) - - t1 = sa_schema.Table( - source, - m, - *[ - sa_schema.Column(n, NULLTYPE) - for n in util.unique_list(t1_cols) - ], - schema=source_schema, - ) - - tname = ( - "%s.%s" % (referent_schema, referent) - if referent_schema - else referent - ) - - dialect_kw["match"] = match - - f = sa_schema.ForeignKeyConstraint( - local_cols, - ["%s.%s" % (tname, n) for n in remote_cols], - name=name, - onupdate=onupdate, - ondelete=ondelete, - deferrable=deferrable, - initially=initially, - 
**dialect_kw, - ) - t1.append_constraint(f) - - return f - - def unique_constraint( - self, - name: Optional[sqla_compat._ConstraintNameDefined], - source: str, - local_cols: Sequence[str], - schema: Optional[str] = None, - **kw, - ) -> UniqueConstraint: - t = sa_schema.Table( - source, - self.metadata(), - *[sa_schema.Column(n, NULLTYPE) for n in local_cols], - schema=schema, - ) - kw["name"] = name - uq = sa_schema.UniqueConstraint(*[t.c[n] for n in local_cols], **kw) - # TODO: need event tests to ensure the event - # is fired off here - t.append_constraint(uq) - return uq - - def check_constraint( - self, - name: Optional[sqla_compat._ConstraintNameDefined], - source: str, - condition: Union[str, TextClause, ColumnElement[Any]], - schema: Optional[str] = None, - **kw, - ) -> Union[CheckConstraint]: - t = sa_schema.Table( - source, - self.metadata(), - sa_schema.Column("x", Integer), - schema=schema, - ) - ck = sa_schema.CheckConstraint(condition, name=name, **kw) - t.append_constraint(ck) - return ck - - def generic_constraint( - self, - name: Optional[sqla_compat._ConstraintNameDefined], - table_name: str, - type_: Optional[str], - schema: Optional[str] = None, - **kw, - ) -> Any: - t = self.table(table_name, schema=schema) - types: Dict[Optional[str], Any] = { - "foreignkey": lambda name: sa_schema.ForeignKeyConstraint( - [], [], name=name - ), - "primary": sa_schema.PrimaryKeyConstraint, - "unique": sa_schema.UniqueConstraint, - "check": lambda name: sa_schema.CheckConstraint("", name=name), - None: sa_schema.Constraint, - } - try: - const = types[type_] - except KeyError as ke: - raise TypeError( - "'type' can be one of %s" - % ", ".join(sorted(repr(x) for x in types)) - ) from ke - else: - const = const(name=name) - t.append_constraint(const) - return const - - def metadata(self) -> MetaData: - kw = {} - if ( - self.migration_context is not None - and "target_metadata" in self.migration_context.opts - ): - mt = self.migration_context.opts["target_metadata"] - if hasattr(mt, "naming_convention"): - kw["naming_convention"] = mt.naming_convention - return sa_schema.MetaData(**kw) - - def table(self, name: str, *columns, **kw) -> Table: - m = self.metadata() - - cols = [ - sqla_compat._copy(c) if c.table is not None else c - for c in columns - if isinstance(c, Column) - ] - # these flags have already added their UniqueConstraint / - # Index objects to the table, so flip them off here. - # SQLAlchemy tometadata() avoids this instead by preserving the - # flags and skipping the constraints that have _type_bound on them, - # but for a migration we'd rather list out the constraints - # explicitly. 
- _constraints_included = kw.pop("_constraints_included", False) - if _constraints_included: - for c in cols: - c.unique = c.index = False - - t = sa_schema.Table(name, m, *cols, **kw) - - constraints = [ - ( - sqla_compat._copy(elem, target_table=t) - if getattr(elem, "parent", None) is not t - and getattr(elem, "parent", None) is not None - else elem - ) - for elem in columns - if isinstance(elem, (Constraint, Index)) - ] - - for const in constraints: - t.append_constraint(const) - - for f in t.foreign_keys: - self._ensure_table_for_fk(m, f) - return t - - def column(self, name: str, type_: TypeEngine, **kw) -> Column: - return sa_schema.Column(name, type_, **kw) - - def index( - self, - name: Optional[str], - tablename: Optional[str], - columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], - schema: Optional[str] = None, - **kw, - ) -> Index: - t = sa_schema.Table( - tablename or "no_table", - self.metadata(), - schema=schema, - ) - kw["_table"] = t - idx = sa_schema.Index( - name, - *[util.sqla_compat._textual_index_column(t, n) for n in columns], - **kw, - ) - return idx - - def _parse_table_key(self, table_key: str) -> Tuple[Optional[str], str]: - if "." in table_key: - tokens = table_key.split(".") - sname: Optional[str] = ".".join(tokens[0:-1]) - tname = tokens[-1] - else: - tname = table_key - sname = None - return (sname, tname) - - def _ensure_table_for_fk(self, metadata: MetaData, fk: ForeignKey) -> None: - """create a placeholder Table object for the referent of a - ForeignKey. - - """ - if isinstance(fk._colspec, str): - table_key, cname = fk._colspec.rsplit(".", 1) - sname, tname = self._parse_table_key(table_key) - if table_key not in metadata.tables: - rel_t = sa_schema.Table(tname, metadata, schema=sname) - else: - rel_t = metadata.tables[table_key] - if cname not in rel_t.c: - rel_t.append_column(sa_schema.Column(cname, NULLTYPE)) diff --git a/myenv/Lib/site-packages/alembic/operations/toimpl.py b/myenv/Lib/site-packages/alembic/operations/toimpl.py deleted file mode 100644 index 4b96004..0000000 --- a/myenv/Lib/site-packages/alembic/operations/toimpl.py +++ /dev/null @@ -1,238 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from typing import TYPE_CHECKING - -from sqlalchemy import schema as sa_schema - -from . 
import ops -from .base import Operations -from ..util.sqla_compat import _copy -from ..util.sqla_compat import sqla_14 - -if TYPE_CHECKING: - from sqlalchemy.sql.schema import Table - - -@Operations.implementation_for(ops.AlterColumnOp) -def alter_column( - operations: "Operations", operation: "ops.AlterColumnOp" -) -> None: - compiler = operations.impl.dialect.statement_compiler( - operations.impl.dialect, None - ) - - existing_type = operation.existing_type - existing_nullable = operation.existing_nullable - existing_server_default = operation.existing_server_default - type_ = operation.modify_type - column_name = operation.column_name - table_name = operation.table_name - schema = operation.schema - server_default = operation.modify_server_default - new_column_name = operation.modify_name - nullable = operation.modify_nullable - comment = operation.modify_comment - existing_comment = operation.existing_comment - - def _count_constraint(constraint): - return not isinstance(constraint, sa_schema.PrimaryKeyConstraint) and ( - not constraint._create_rule or constraint._create_rule(compiler) - ) - - if existing_type and type_: - t = operations.schema_obj.table( - table_name, - sa_schema.Column(column_name, existing_type), - schema=schema, - ) - for constraint in t.constraints: - if _count_constraint(constraint): - operations.impl.drop_constraint(constraint) - - operations.impl.alter_column( - table_name, - column_name, - nullable=nullable, - server_default=server_default, - name=new_column_name, - type_=type_, - schema=schema, - existing_type=existing_type, - existing_server_default=existing_server_default, - existing_nullable=existing_nullable, - comment=comment, - existing_comment=existing_comment, - **operation.kw, - ) - - if type_: - t = operations.schema_obj.table( - table_name, - operations.schema_obj.column(column_name, type_), - schema=schema, - ) - for constraint in t.constraints: - if _count_constraint(constraint): - operations.impl.add_constraint(constraint) - - -@Operations.implementation_for(ops.DropTableOp) -def drop_table(operations: "Operations", operation: "ops.DropTableOp") -> None: - kw = {} - if operation.if_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - - kw["if_exists"] = operation.if_exists - operations.impl.drop_table( - operation.to_table(operations.migration_context), **kw - ) - - -@Operations.implementation_for(ops.DropColumnOp) -def drop_column( - operations: "Operations", operation: "ops.DropColumnOp" -) -> None: - column = operation.to_column(operations.migration_context) - operations.impl.drop_column( - operation.table_name, column, schema=operation.schema, **operation.kw - ) - - -@Operations.implementation_for(ops.CreateIndexOp) -def create_index( - operations: "Operations", operation: "ops.CreateIndexOp" -) -> None: - idx = operation.to_index(operations.migration_context) - kw = {} - if operation.if_not_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - - kw["if_not_exists"] = operation.if_not_exists - operations.impl.create_index(idx, **kw) - - -@Operations.implementation_for(ops.DropIndexOp) -def drop_index(operations: "Operations", operation: "ops.DropIndexOp") -> None: - kw = {} - if operation.if_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - - kw["if_exists"] = operation.if_exists - - operations.impl.drop_index( - operation.to_index(operations.migration_context), - **kw, - ) - - 
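The handlers in this module follow the same dispatch pattern that third-party code can use to define new operations; a minimal sketch, where the ``GrantOp`` class, the ``grant_select`` name, and the emitted SQL are all hypothetical::

    from alembic.operations import MigrateOperation, Operations

    @Operations.register_operation("grant_select")
    class GrantOp(MigrateOperation):
        def __init__(self, table_name, role):
            self.table_name = table_name
            self.role = role

        @classmethod
        def grant_select(cls, operations, table_name, role):
            # makes op.grant_select("account", "readonly")
            # available in migration scripts
            return operations.invoke(cls(table_name, role))

    @Operations.implementation_for(GrantOp)
    def grant_select(operations, operation):
        operations.execute(
            "GRANT SELECT ON %s TO %s"
            % (operation.table_name, operation.role)
        )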
-@Operations.implementation_for(ops.CreateTableOp) -def create_table( - operations: "Operations", operation: "ops.CreateTableOp" -) -> "Table": - kw = {} - if operation.if_not_exists is not None: - if not sqla_14: - raise NotImplementedError("SQLAlchemy 1.4+ required") - - kw["if_not_exists"] = operation.if_not_exists - table = operation.to_table(operations.migration_context) - operations.impl.create_table(table, **kw) - return table - - -@Operations.implementation_for(ops.RenameTableOp) -def rename_table( - operations: "Operations", operation: "ops.RenameTableOp" -) -> None: - operations.impl.rename_table( - operation.table_name, operation.new_table_name, schema=operation.schema - ) - - -@Operations.implementation_for(ops.CreateTableCommentOp) -def create_table_comment( - operations: "Operations", operation: "ops.CreateTableCommentOp" -) -> None: - table = operation.to_table(operations.migration_context) - operations.impl.create_table_comment(table) - - -@Operations.implementation_for(ops.DropTableCommentOp) -def drop_table_comment( - operations: "Operations", operation: "ops.DropTableCommentOp" -) -> None: - table = operation.to_table(operations.migration_context) - operations.impl.drop_table_comment(table) - - -@Operations.implementation_for(ops.AddColumnOp) -def add_column(operations: "Operations", operation: "ops.AddColumnOp") -> None: - table_name = operation.table_name - column = operation.column - schema = operation.schema - kw = operation.kw - - if column.table is not None: - column = _copy(column) - - t = operations.schema_obj.table(table_name, column, schema=schema) - operations.impl.add_column(table_name, column, schema=schema, **kw) - - for constraint in t.constraints: - if not isinstance(constraint, sa_schema.PrimaryKeyConstraint): - operations.impl.add_constraint(constraint) - for index in t.indexes: - operations.impl.create_index(index) - - with_comment = ( - operations.impl.dialect.supports_comments - and not operations.impl.dialect.inline_comments - ) - comment = column.comment - if comment and with_comment: - operations.impl.create_column_comment(column) - - -@Operations.implementation_for(ops.AddConstraintOp) -def create_constraint( - operations: "Operations", operation: "ops.AddConstraintOp" -) -> None: - operations.impl.add_constraint( - operation.to_constraint(operations.migration_context) - ) - - -@Operations.implementation_for(ops.DropConstraintOp) -def drop_constraint( - operations: "Operations", operation: "ops.DropConstraintOp" -) -> None: - operations.impl.drop_constraint( - operations.schema_obj.generic_constraint( - operation.constraint_name, - operation.table_name, - operation.constraint_type, - schema=operation.schema, - ) - ) - - -@Operations.implementation_for(ops.BulkInsertOp) -def bulk_insert( - operations: "Operations", operation: "ops.BulkInsertOp" -) -> None: - operations.impl.bulk_insert( # type: ignore[union-attr] - operation.table, operation.rows, multiinsert=operation.multiinsert - ) - - -@Operations.implementation_for(ops.ExecuteSQLOp) -def execute_sql( - operations: "Operations", operation: "ops.ExecuteSQLOp" -) -> None: - operations.migration_context.impl.execute( - operation.sqltext, execution_options=operation.execution_options - ) diff --git a/myenv/Lib/site-packages/alembic/py.typed b/myenv/Lib/site-packages/alembic/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/alembic/runtime/__init__.py b/myenv/Lib/site-packages/alembic/runtime/__init__.py deleted file mode 100644 index e69de29..0000000 
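As the ``execute_sql`` handler above suggests, execution options given to :meth:`.Operations.execute` are passed through to the connection; a short sketch (the statement and the option value are illustrative only)::

    from alembic import op

    op.execute(
        "DELETE FROM account WHERE name = 'temp'",
        execution_options={"isolation_level": "AUTOCOMMIT"},
    )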
diff --git a/myenv/Lib/site-packages/alembic/runtime/environment.py b/myenv/Lib/site-packages/alembic/runtime/environment.py deleted file mode 100644 index a30972e..0000000 --- a/myenv/Lib/site-packages/alembic/runtime/environment.py +++ /dev/null @@ -1,1051 +0,0 @@ -from __future__ import annotations - -from typing import Any -from typing import Callable -from typing import Collection -from typing import ContextManager -from typing import Dict -from typing import List -from typing import Mapping -from typing import MutableMapping -from typing import Optional -from typing import overload -from typing import Sequence -from typing import TextIO -from typing import Tuple -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy.sql.schema import Column -from sqlalchemy.sql.schema import FetchedValue -from typing_extensions import Literal - -from .migration import _ProxyTransaction -from .migration import MigrationContext -from .. import util -from ..operations import Operations -from ..script.revision import _GetRevArg - -if TYPE_CHECKING: - from sqlalchemy.engine import URL - from sqlalchemy.engine.base import Connection - from sqlalchemy.sql import Executable - from sqlalchemy.sql.schema import MetaData - from sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.sql.type_api import TypeEngine - - from .migration import MigrationInfo - from ..autogenerate.api import AutogenContext - from ..config import Config - from ..ddl import DefaultImpl - from ..operations.ops import MigrationScript - from ..script.base import ScriptDirectory - -_RevNumber = Optional[Union[str, Tuple[str, ...]]] - -ProcessRevisionDirectiveFn = Callable[ - [MigrationContext, _GetRevArg, List["MigrationScript"]], None -] - -RenderItemFn = Callable[ - [str, Any, "AutogenContext"], Union[str, Literal[False]] -] - -NameFilterType = Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", -] -NameFilterParentNames = MutableMapping[ - Literal["schema_name", "table_name", "schema_qualified_table_name"], - Optional[str], -] -IncludeNameFn = Callable[ - [Optional[str], NameFilterType, NameFilterParentNames], bool -] - -IncludeObjectFn = Callable[ - [ - "SchemaItem", - Optional[str], - NameFilterType, - bool, - Optional["SchemaItem"], - ], - bool, -] - -OnVersionApplyFn = Callable[ - [MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]], - None, -] - -CompareServerDefault = Callable[ - [ - MigrationContext, - "Column[Any]", - "Column[Any]", - Optional[str], - Optional[FetchedValue], - Optional[str], - ], - Optional[bool], -] - -CompareType = Callable[ - [ - MigrationContext, - "Column[Any]", - "Column[Any]", - "TypeEngine[Any]", - "TypeEngine[Any]", - ], - Optional[bool], -] - - -class EnvironmentContext(util.ModuleClsProxy): - """A configurational facade made available in an ``env.py`` script. - - The :class:`.EnvironmentContext` acts as a *facade* to the more - nuts-and-bolts objects of :class:`.MigrationContext` as well as certain - aspects of :class:`.Config`, - within the context of the ``env.py`` script that is invoked by - most Alembic commands. - - :class:`.EnvironmentContext` is normally instantiated - when a command in :mod:`alembic.command` is run. It then makes - itself available in the ``alembic.context`` module for the scope - of the command. From within an ``env.py`` script, the current - :class:`.EnvironmentContext` is available by importing this module. - - :class:`.EnvironmentContext` also supports programmatic usage. 
- At this level, it acts as a Python context manager, that is, is - intended to be used using the - ``with:`` statement. A typical use of :class:`.EnvironmentContext`:: - - from alembic.config import Config - from alembic.script import ScriptDirectory - - config = Config() - config.set_main_option("script_location", "myapp:migrations") - script = ScriptDirectory.from_config(config) - - - def my_function(rev, context): - '''do something with revision "rev", which - will be the current database revision, - and "context", which is the MigrationContext - that the env.py will create''' - - - with EnvironmentContext( - config, - script, - fn=my_function, - as_sql=False, - starting_rev="base", - destination_rev="head", - tag="sometag", - ): - script.run_env() - - The above script will invoke the ``env.py`` script - within the migration environment. If and when ``env.py`` - calls :meth:`.MigrationContext.run_migrations`, the - ``my_function()`` function above will be called - by the :class:`.MigrationContext`, given the context - itself as well as the current revision in the database. - - .. note:: - - For most API usages other than full blown - invocation of migration scripts, the :class:`.MigrationContext` - and :class:`.ScriptDirectory` objects can be created and - used directly. The :class:`.EnvironmentContext` object - is *only* needed when you need to actually invoke the - ``env.py`` module present in the migration environment. - - """ - - _migration_context: Optional[MigrationContext] = None - - config: Config = None # type:ignore[assignment] - """An instance of :class:`.Config` representing the - configuration file contents as well as other variables - set programmatically within it.""" - - script: ScriptDirectory = None # type:ignore[assignment] - """An instance of :class:`.ScriptDirectory` which provides - programmatic access to version files within the ``versions/`` - directory. - - """ - - def __init__( - self, config: Config, script: ScriptDirectory, **kw: Any - ) -> None: - r"""Construct a new :class:`.EnvironmentContext`. - - :param config: a :class:`.Config` instance. - :param script: a :class:`.ScriptDirectory` instance. - :param \**kw: keyword options that will be ultimately - passed along to the :class:`.MigrationContext` when - :meth:`.EnvironmentContext.configure` is called. - - """ - self.config = config - self.script = script - self.context_opts = kw - - def __enter__(self) -> EnvironmentContext: - """Establish a context which provides a - :class:`.EnvironmentContext` object to - env.py scripts. - - The :class:`.EnvironmentContext` will - be made available as ``from alembic import context``. - - """ - self._install_proxy() - return self - - def __exit__(self, *arg: Any, **kw: Any) -> None: - self._remove_proxy() - - def is_offline_mode(self) -> bool: - """Return True if the current migrations environment - is running in "offline mode". - - This is ``True`` or ``False`` depending - on the ``--sql`` flag passed. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - return self.context_opts.get("as_sql", False) # type: ignore[no-any-return] # noqa: E501 - - def is_transactional_ddl(self) -> bool: - """Return True if the context is configured to expect a - transactional DDL capable backend. 
- - This defaults to the type of database in use, and - can be overridden by the ``transactional_ddl`` argument - to :meth:`.configure` - - This function requires that a :class:`.MigrationContext` - has first been made available via :meth:`.configure`. - - """ - return self.get_context().impl.transactional_ddl - - def requires_connection(self) -> bool: - return not self.is_offline_mode() - - def get_head_revision(self) -> _RevNumber: - """Return the hex identifier of the 'head' script revision. - - If the script directory has multiple heads, this - method raises a :class:`.CommandError`; - :meth:`.EnvironmentContext.get_head_revisions` should be preferred. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. seealso:: :meth:`.EnvironmentContext.get_head_revisions` - - """ - return self.script.as_revision_number("head") - - def get_head_revisions(self) -> _RevNumber: - """Return the hex identifier of the 'heads' script revision(s). - - This returns a tuple containing the version number of all - heads in the script directory. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - return self.script.as_revision_number("heads") - - def get_starting_revision_argument(self) -> _RevNumber: - """Return the 'starting revision' argument, - if the revision was passed using ``start:end``. - - This is only meaningful in "offline" mode. - Returns ``None`` if no value is available - or was configured. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - if self._migration_context is not None: - return self.script.as_revision_number( - self.get_context()._start_from_rev - ) - elif "starting_rev" in self.context_opts: - return self.script.as_revision_number( - self.context_opts["starting_rev"] - ) - else: - # this should raise only in the case that a command - # is being run where the "starting rev" is never applicable; - # this is to catch scripts which rely upon this in - # non-sql mode or similar - raise util.CommandError( - "No starting revision argument is available." - ) - - def get_revision_argument(self) -> _RevNumber: - """Get the 'destination' revision argument. - - This is typically the argument passed to the - ``upgrade`` or ``downgrade`` command. - - If it was specified as ``head``, the actual - version number is returned; if specified - as ``base``, ``None`` is returned. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - """ - return self.script.as_revision_number( - self.context_opts["destination_rev"] - ) - - def get_tag_argument(self) -> Optional[str]: - """Return the value passed for the ``--tag`` argument, if any. - - The ``--tag`` argument is not used directly by Alembic, - but is available for custom ``env.py`` configurations that - wish to use it; particularly for offline generation scripts - that wish to generate tagged filenames. - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. seealso:: - - :meth:`.EnvironmentContext.get_x_argument` - a newer and more - open ended system of extending ``env.py`` scripts via the command - line. - - """ - return self.context_opts.get("tag", None) # type: ignore[no-any-return] # noqa: E501 - - @overload - def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: ... - - @overload - def get_x_argument( - self, as_dictionary: Literal[True] - ) -> Dict[str, str]: ... 
- - @overload - def get_x_argument( - self, as_dictionary: bool = ... - ) -> Union[List[str], Dict[str, str]]: ... - - def get_x_argument( - self, as_dictionary: bool = False - ) -> Union[List[str], Dict[str, str]]: - """Return the value(s) passed for the ``-x`` argument, if any. - - The ``-x`` argument is an open ended flag that allows any user-defined - value or values to be passed on the command line, then available - here for consumption by a custom ``env.py`` script. - - The return value is a list, returned directly from the ``argparse`` - structure. If ``as_dictionary=True`` is passed, the ``x`` arguments - are parsed using ``key=value`` format into a dictionary that is - then returned. If there is no ``=`` in the argument, value is an empty - string. - - .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when - arguments are passed without the ``=`` symbol. - - For example, to support passing a database URL on the command line, - the standard ``env.py`` script can be modified like this:: - - cmd_line_url = context.get_x_argument( - as_dictionary=True).get('dbname') - if cmd_line_url: - engine = create_engine(cmd_line_url) - else: - engine = engine_from_config( - config.get_section(config.config_ini_section), - prefix='sqlalchemy.', - poolclass=pool.NullPool) - - This then takes effect by running the ``alembic`` script as:: - - alembic -x dbname=postgresql://user:pass@host/dbname upgrade head - - This function does not require that the :class:`.MigrationContext` - has been configured. - - .. seealso:: - - :meth:`.EnvironmentContext.get_tag_argument` - - :attr:`.Config.cmd_opts` - - """ - if self.config.cmd_opts is not None: - value = self.config.cmd_opts.x or [] - else: - value = [] - if as_dictionary: - dict_value = {} - for arg in value: - x_key, _, x_value = arg.partition("=") - dict_value[x_key] = x_value - value = dict_value - - return value - - def configure( - self, - connection: Optional[Connection] = None, - url: Optional[Union[str, URL]] = None, - dialect_name: Optional[str] = None, - dialect_opts: Optional[Dict[str, Any]] = None, - transactional_ddl: Optional[bool] = None, - transaction_per_migration: bool = False, - output_buffer: Optional[TextIO] = None, - starting_rev: Optional[str] = None, - tag: Optional[str] = None, - template_args: Optional[Dict[str, Any]] = None, - render_as_batch: bool = False, - target_metadata: Union[MetaData, Sequence[MetaData], None] = None, - include_name: Optional[IncludeNameFn] = None, - include_object: Optional[IncludeObjectFn] = None, - include_schemas: bool = False, - process_revision_directives: Optional[ - ProcessRevisionDirectiveFn - ] = None, - compare_type: Union[bool, CompareType] = True, - compare_server_default: Union[bool, CompareServerDefault] = False, - render_item: Optional[RenderItemFn] = None, - literal_binds: bool = False, - upgrade_token: str = "upgrades", - downgrade_token: str = "downgrades", - alembic_module_prefix: str = "op.", - sqlalchemy_module_prefix: str = "sa.", - user_module_prefix: Optional[str] = None, - on_version_apply: Optional[OnVersionApplyFn] = None, - **kw: Any, - ) -> None: - """Configure a :class:`.MigrationContext` within this - :class:`.EnvironmentContext` which will provide database - connectivity and other configuration to a series of - migration scripts. - - Many methods on :class:`.EnvironmentContext` require that - this method has been called in order to function, as they - ultimately need to have database access or at least access - to the dialect in use. 
Those which do are documented as such. - - The important thing needed by :meth:`.configure` is a - means to determine what kind of database dialect is in use. - An actual connection to that database is needed only if - the :class:`.MigrationContext` is to be used in - "online" mode. - - If the :meth:`.is_offline_mode` function returns ``True``, - then no connection is needed here. Otherwise, the - ``connection`` parameter should be present as an - instance of :class:`sqlalchemy.engine.Connection`. - - This function is typically called from the ``env.py`` - script within a migration environment. It can be called - multiple times for an invocation. The most recent - :class:`~sqlalchemy.engine.Connection` - for which it was called is the one that will be operated upon - by the next call to :meth:`.run_migrations`. - - General parameters: - - :param connection: a :class:`~sqlalchemy.engine.Connection` - to use - for SQL execution in "online" mode. When present, is also - used to determine the type of dialect in use. - :param url: a string database url, or a - :class:`sqlalchemy.engine.url.URL` object. - The type of dialect to be used will be derived from this if - ``connection`` is not passed. - :param dialect_name: string name of a dialect, such as - "postgresql", "mssql", etc. - The type of dialect to be used will be derived from this if - ``connection`` and ``url`` are not passed. - :param dialect_opts: dictionary of options to be passed to dialect - constructor. - :param transactional_ddl: Force the usage of "transactional" - DDL on or off; - this otherwise defaults to whether or not the dialect in - use supports it. - :param transaction_per_migration: if True, nest each migration script - in a transaction rather than the full series of migrations to - run. - :param output_buffer: a file-like object that will be used - for textual output - when the ``--sql`` option is used to generate SQL scripts. - Defaults to - ``sys.stdout`` if not passed here and also not present on - the :class:`.Config` - object. The value here overrides that of the :class:`.Config` - object. - :param output_encoding: when using ``--sql`` to generate SQL - scripts, apply this encoding to the string output. - :param literal_binds: when using ``--sql`` to generate SQL - scripts, pass through the ``literal_binds`` flag to the compiler - so that any literal values that would ordinarily be bound - parameters are converted to plain strings. - - .. warning:: Dialects can typically only handle simple datatypes - like strings and numbers for auto-literal generation. Datatypes - like dates, intervals, and others may still require manual - formatting, typically using :meth:`.Operations.inline_literal`. - - .. note:: the ``literal_binds`` flag is ignored on SQLAlchemy - versions prior to 0.8 where this feature is not supported. - - .. seealso:: - - :meth:`.Operations.inline_literal` - - :param starting_rev: Override the "starting revision" argument - when using ``--sql`` mode. - :param tag: a string tag for usage by custom ``env.py`` scripts. - Set via the ``--tag`` option, can be overridden here. - :param template_args: dictionary of template arguments which - will be added to the template argument environment when - running the "revision" command. Note that the script environment - is only run within the "revision" command if the --autogenerate - option is used, or if the option "revision_environment=true" - is present in the alembic.ini file. - - :param version_table: The name of the Alembic version table. 
- The default is ``'alembic_version'``. - :param version_table_schema: Optional schema to place version - table within. - :param version_table_pk: boolean, whether the Alembic version table - should use a primary key constraint for the "value" column; this - only takes effect when the table is first created. - Defaults to True; setting to False should not be necessary and is - here for backwards compatibility reasons. - :param on_version_apply: a callable or collection of callables to be - run for each migration step. - The callables will be run in the order they are given, once for - each migration step, after the respective operation has been - applied but before its transaction is finalized. - Each callable accepts no positional arguments and the following - keyword arguments: - - * ``ctx``: the :class:`.MigrationContext` running the migration, - * ``step``: a :class:`.MigrationInfo` representing the - step currently being applied, - * ``heads``: a collection of version strings representing the - current heads, - * ``run_args``: the ``**kwargs`` passed to :meth:`.run_migrations`. - - Parameters specific to the autogenerate feature, when - ``alembic revision`` is run with the ``--autogenerate`` feature: - - :param target_metadata: a :class:`sqlalchemy.schema.MetaData` - object, or a sequence of :class:`~sqlalchemy.schema.MetaData` - objects, that will be consulted during autogeneration. - The tables present in each :class:`~sqlalchemy.schema.MetaData` - will be compared against - what is locally available on the target - :class:`~sqlalchemy.engine.Connection` - to produce candidate upgrade/downgrade operations. - :param compare_type: Indicates type comparison behavior during - an autogenerate - operation. Defaults to ``True`` turning on type comparison, which - has good accuracy on most backends. See :ref:`compare_types` - for an example as well as information on other type - comparison options. Set to ``False`` which disables type - comparison. A callable can also be passed to provide custom type - comparison, see :ref:`compare_types` for additional details. - - .. versionchanged:: 1.12.0 The default value of - :paramref:`.EnvironmentContext.configure.compare_type` has been - changed to ``True``. - - .. seealso:: - - :ref:`compare_types` - - :paramref:`.EnvironmentContext.configure.compare_server_default` - - :param compare_server_default: Indicates server default comparison - behavior during - an autogenerate operation. Defaults to ``False`` which disables - server default - comparison. Set to ``True`` to turn on server default comparison, - which has - varied accuracy depending on backend. - - To customize server default comparison behavior, a callable may - be specified - which can filter server default comparisons during an - autogenerate operation. - defaults during an autogenerate operation. The format of this - callable is:: - - def my_compare_server_default(context, inspected_column, - metadata_column, inspected_default, metadata_default, - rendered_metadata_default): - # return True if the defaults are different, - # False if not, or None to allow the default implementation - # to compare these defaults - return None - - context.configure( - # ... - compare_server_default = my_compare_server_default - ) - - ``inspected_column`` is a dictionary structure as returned by - :meth:`sqlalchemy.engine.reflection.Inspector.get_columns`, whereas - ``metadata_column`` is a :class:`sqlalchemy.schema.Column` from - the local model environment. 
- - A return value of ``None`` indicates to allow default server default - comparison - to proceed. Note that some backends such as Postgresql actually - execute - the two defaults on the database side to compare for equivalence. - - .. seealso:: - - :paramref:`.EnvironmentContext.configure.compare_type` - - :param include_name: A callable function which is given - the chance to return ``True`` or ``False`` for any database reflected - object based on its name, including database schema names when - the :paramref:`.EnvironmentContext.configure.include_schemas` flag - is set to ``True``. - - The function accepts the following positional arguments: - - * ``name``: the name of the object, such as schema name or table name. - Will be ``None`` when indicating the default schema name of the - database connection. - * ``type``: a string describing the type of object; currently - ``"schema"``, ``"table"``, ``"column"``, ``"index"``, - ``"unique_constraint"``, or ``"foreign_key_constraint"`` - * ``parent_names``: a dictionary of "parent" object names, that are - relative to the name being given. Keys in this dictionary may - include: ``"schema_name"``, ``"table_name"`` or - ``"schema_qualified_table_name"``. - - E.g.:: - - def include_name(name, type_, parent_names): - if type_ == "schema": - return name in ["schema_one", "schema_two"] - else: - return True - - context.configure( - # ... - include_schemas = True, - include_name = include_name - ) - - .. seealso:: - - :ref:`autogenerate_include_hooks` - - :paramref:`.EnvironmentContext.configure.include_object` - - :paramref:`.EnvironmentContext.configure.include_schemas` - - - :param include_object: A callable function which is given - the chance to return ``True`` or ``False`` for any object, - indicating if the given object should be considered in the - autogenerate sweep. - - The function accepts the following positional arguments: - - * ``object``: a :class:`~sqlalchemy.schema.SchemaItem` object such - as a :class:`~sqlalchemy.schema.Table`, - :class:`~sqlalchemy.schema.Column`, - :class:`~sqlalchemy.schema.Index` - :class:`~sqlalchemy.schema.UniqueConstraint`, - or :class:`~sqlalchemy.schema.ForeignKeyConstraint` object - * ``name``: the name of the object. This is typically available - via ``object.name``. - * ``type``: a string describing the type of object; currently - ``"table"``, ``"column"``, ``"index"``, ``"unique_constraint"``, - or ``"foreign_key_constraint"`` - * ``reflected``: ``True`` if the given object was produced based on - table reflection, ``False`` if it's from a local :class:`.MetaData` - object. - * ``compare_to``: the object being compared against, if available, - else ``None``. - - E.g.:: - - def include_object(object, name, type_, reflected, compare_to): - if (type_ == "column" and - not reflected and - object.info.get("skip_autogenerate", False)): - return False - else: - return True - - context.configure( - # ... - include_object = include_object - ) - - For the use case of omitting specific schemas from a target database - when :paramref:`.EnvironmentContext.configure.include_schemas` is - set to ``True``, the :attr:`~sqlalchemy.schema.Table.schema` - attribute can be checked for each :class:`~sqlalchemy.schema.Table` - object passed to the hook, however it is much more efficient - to filter on schemas before reflection of objects takes place - using the :paramref:`.EnvironmentContext.configure.include_name` - hook. - - .. 
seealso:: - - :ref:`autogenerate_include_hooks` - - :paramref:`.EnvironmentContext.configure.include_name` - - :paramref:`.EnvironmentContext.configure.include_schemas` - - :param render_as_batch: if True, commands which alter elements - within a table will be placed under a ``with batch_alter_table():`` - directive, so that batch migrations will take place. - - .. seealso:: - - :ref:`batch_migrations` - - :param include_schemas: If True, autogenerate will scan across - all schemas located by the SQLAlchemy - :meth:`~sqlalchemy.engine.reflection.Inspector.get_schema_names` - method, and include all differences in tables found across all - those schemas. When using this option, you may want to also - use the :paramref:`.EnvironmentContext.configure.include_name` - parameter to specify a callable which - can filter the tables/schemas that get included. - - .. seealso:: - - :ref:`autogenerate_include_hooks` - - :paramref:`.EnvironmentContext.configure.include_name` - - :paramref:`.EnvironmentContext.configure.include_object` - - :param render_item: Callable that can be used to override how - any schema item, i.e. column, constraint, type, - etc., is rendered for autogenerate. The callable receives a - string describing the type of object, the object, and - the autogen context. If it returns False, the - default rendering method will be used. If it returns None, - the item will not be rendered in the context of a Table - construct, that is, can be used to skip columns or constraints - within op.create_table():: - - def my_render_column(type_, col, autogen_context): - if type_ == "column" and isinstance(col, MySpecialCol): - return repr(col) - else: - return False - - context.configure( - # ... - render_item = my_render_column - ) - - Available values for the type string include: ``"column"``, - ``"primary_key"``, ``"foreign_key"``, ``"unique"``, ``"check"``, - ``"type"``, ``"server_default"``. - - .. seealso:: - - :ref:`autogen_render_types` - - :param upgrade_token: When autogenerate completes, the text of the - candidate upgrade operations will be present in this template - variable when ``script.py.mako`` is rendered. Defaults to - ``upgrades``. - :param downgrade_token: When autogenerate completes, the text of the - candidate downgrade operations will be present in this - template variable when ``script.py.mako`` is rendered. Defaults to - ``downgrades``. - - :param alembic_module_prefix: When autogenerate refers to Alembic - :mod:`alembic.operations` constructs, this prefix will be used - (i.e. ``op.create_table``) Defaults to "``op.``". - Can be ``None`` to indicate no prefix. - - :param sqlalchemy_module_prefix: When autogenerate refers to - SQLAlchemy - :class:`~sqlalchemy.schema.Column` or type classes, this prefix - will be used - (i.e. ``sa.Column("somename", sa.Integer)``) Defaults to "``sa.``". - Can be ``None`` to indicate no prefix. - Note that when dialect-specific types are rendered, autogenerate - will render them using the dialect module name, i.e. ``mssql.BIT()``, - ``postgresql.UUID()``. - - :param user_module_prefix: When autogenerate refers to a SQLAlchemy - type (e.g. :class:`.TypeEngine`) where the module name is not - under the ``sqlalchemy`` namespace, this prefix will be used - within autogenerate. If left at its default of - ``None``, the ``__module__`` attribute of the type is used to - render the import module. 
It's a good practice to set this - and to have all custom types be available from a fixed module space, - in order to future-proof migration files against reorganizations - in modules. - - .. seealso:: - - :ref:`autogen_module_prefix` - - :param process_revision_directives: a callable function that will - be passed a structure representing the end result of an autogenerate - or plain "revision" operation, which can be manipulated to affect - how the ``alembic revision`` command ultimately outputs new - revision scripts. The structure of the callable is:: - - def process_revision_directives(context, revision, directives): - pass - - The ``directives`` parameter is a Python list containing - a single :class:`.MigrationScript` directive, which represents - the revision file to be generated. This list as well as its - contents may be freely modified to produce any set of commands. - The section :ref:`customizing_revision` shows an example of - doing this. The ``context`` parameter is the - :class:`.MigrationContext` in use, - and ``revision`` is a tuple of revision identifiers representing the - current revision of the database. - - The callable is invoked at all times when the ``--autogenerate`` - option is passed to ``alembic revision``. If ``--autogenerate`` - is not passed, the callable is invoked only if the - ``revision_environment`` variable is set to True in the Alembic - configuration, in which case the given ``directives`` collection - will contain empty :class:`.UpgradeOps` and :class:`.DowngradeOps` - collections for ``.upgrade_ops`` and ``.downgrade_ops``. The - ``--autogenerate`` option itself can be inferred by inspecting - ``context.config.cmd_opts.autogenerate``. - - The callable function may optionally be an instance of - a :class:`.Rewriter` object. This is a helper object that - assists in the production of autogenerate-stream rewriter functions. - - .. seealso:: - - :ref:`customizing_revision` - - :ref:`autogen_rewriter` - - :paramref:`.command.revision.process_revision_directives` - - Parameters specific to individual backends: - - :param mssql_batch_separator: The "batch separator" which will - be placed between each statement when generating offline SQL Server - migrations. Defaults to ``GO``. Note this is in addition to the - customary semicolon ``;`` at the end of each statement; SQL Server - considers the "batch separator" to denote the end of an - individual statement execution, and cannot group certain - dependent operations in one step. - :param oracle_batch_separator: The "batch separator" which will - be placed between each statement when generating offline - Oracle migrations. Defaults to ``/``. Oracle doesn't add a - semicolon between statements like most other backends. 
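Taken together, the hooks documented above are wired through a single ``context.configure()`` call in ``env.py``. The following is a minimal sketch, not taken from this file: ``connection`` and ``target_metadata`` are assumed to be set up in the usual ``env.py`` fashion, and ``loose_string_compare`` is an invented illustration of the ``compare_type`` callable contract (return ``True`` for "different", ``False`` for "equal", ``None`` to defer to the default comparison)::

    import sqlalchemy as sa

    from alembic import context

    def loose_string_compare(context, inspected_column, metadata_column,
                             inspected_type, metadata_type):
        # treat any two string types as equivalent, ignoring length
        if isinstance(inspected_type, sa.String) and isinstance(
            metadata_type, sa.String
        ):
            return False    # "not different"
        return None         # defer to the default comparison

    context.configure(
        connection=connection,              # assumed: opened earlier in env.py
        target_metadata=target_metadata,    # assumed: the models' MetaData
        compare_type=loose_string_compare,
        compare_server_default=True,
        render_as_batch=True,               # e.g. for SQLite ALTER support
    )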
- - """ - opts = self.context_opts - if transactional_ddl is not None: - opts["transactional_ddl"] = transactional_ddl - if output_buffer is not None: - opts["output_buffer"] = output_buffer - elif self.config.output_buffer is not None: - opts["output_buffer"] = self.config.output_buffer - if starting_rev: - opts["starting_rev"] = starting_rev - if tag: - opts["tag"] = tag - if template_args and "template_args" in opts: - opts["template_args"].update(template_args) - opts["transaction_per_migration"] = transaction_per_migration - opts["target_metadata"] = target_metadata - opts["include_name"] = include_name - opts["include_object"] = include_object - opts["include_schemas"] = include_schemas - opts["render_as_batch"] = render_as_batch - opts["upgrade_token"] = upgrade_token - opts["downgrade_token"] = downgrade_token - opts["sqlalchemy_module_prefix"] = sqlalchemy_module_prefix - opts["alembic_module_prefix"] = alembic_module_prefix - opts["user_module_prefix"] = user_module_prefix - opts["literal_binds"] = literal_binds - opts["process_revision_directives"] = process_revision_directives - opts["on_version_apply"] = util.to_tuple(on_version_apply, default=()) - - if render_item is not None: - opts["render_item"] = render_item - opts["compare_type"] = compare_type - if compare_server_default is not None: - opts["compare_server_default"] = compare_server_default - opts["script"] = self.script - - opts.update(kw) - - self._migration_context = MigrationContext.configure( - connection=connection, - url=url, - dialect_name=dialect_name, - environment_context=self, - dialect_opts=dialect_opts, - opts=opts, - ) - - def run_migrations(self, **kw: Any) -> None: - """Run migrations as determined by the current command line - configuration - as well as versioning information present (or not) in the current - database connection (if one is present). - - The function accepts optional ``**kw`` arguments. If these are - passed, they are sent directly to the ``upgrade()`` and - ``downgrade()`` - functions within each target revision file. By modifying the - ``script.py.mako`` file so that the ``upgrade()`` and ``downgrade()`` - functions accept arguments, parameters can be passed here so that - contextual information, usually information to identify a particular - database in use, can be passed from a custom ``env.py`` script - to the migration functions. - - This function requires that a :class:`.MigrationContext` has - first been made available via :meth:`.configure`. - - """ - assert self._migration_context is not None - with Operations.context(self._migration_context): - self.get_context().run_migrations(**kw) - - def execute( - self, - sql: Union[Executable, str], - execution_options: Optional[Dict[str, Any]] = None, - ) -> None: - """Execute the given SQL using the current change context. - - The behavior of :meth:`.execute` is the same - as that of :meth:`.Operations.execute`. Please see that - function's documentation for full detail including - caveats and limitations. - - This function requires that a :class:`.MigrationContext` has - first been made available via :meth:`.configure`. - - """ - self.get_context().execute(sql, execution_options=execution_options) - - def static_output(self, text: str) -> None: - """Emit text directly to the "offline" SQL stream. - - Typically this is for emitting comments that - start with --. The statement is not treated - as a SQL execution, no ; or batch separator - is added, etc. 
- - """ - self.get_context().impl.static_output(text) - - def begin_transaction( - self, - ) -> Union[_ProxyTransaction, ContextManager[None]]: - """Return a context manager that will - enclose an operation within a "transaction", - as defined by the environment's offline - and transactional DDL settings. - - e.g.:: - - with context.begin_transaction(): - context.run_migrations() - - :meth:`.begin_transaction` is intended to - "do the right thing" regardless of - calling context: - - * If :meth:`.is_transactional_ddl` is ``False``, - returns a "do nothing" context manager - which otherwise produces no transactional - state or directives. - * If :meth:`.is_offline_mode` is ``True``, - returns a context manager that will - invoke the :meth:`.DefaultImpl.emit_begin` - and :meth:`.DefaultImpl.emit_commit` - methods, which will produce the string - directives ``BEGIN`` and ``COMMIT`` on - the output stream, as rendered by the - target backend (e.g. SQL Server would - emit ``BEGIN TRANSACTION``). - * Otherwise, calls :meth:`sqlalchemy.engine.Connection.begin` - on the current online connection, which - returns a :class:`sqlalchemy.engine.Transaction` - object. This object demarcates a real - transaction and is itself a context manager, - which will roll back if an exception - is raised. - - Note that a custom ``env.py`` script which - has more specific transactional needs can of course - manipulate the :class:`~sqlalchemy.engine.Connection` - directly to produce transactional state in "online" - mode. - - """ - - return self.get_context().begin_transaction() - - def get_context(self) -> MigrationContext: - """Return the current :class:`.MigrationContext` object. - - If :meth:`.EnvironmentContext.configure` has not been - called yet, raises an exception. - - """ - - if self._migration_context is None: - raise Exception("No context has been configured yet.") - return self._migration_context - - def get_bind(self) -> Connection: - """Return the current 'bind'. - - In "online" mode, this is the - :class:`sqlalchemy.engine.Connection` currently being used - to emit SQL to the database. - - This function requires that a :class:`.MigrationContext` - has first been made available via :meth:`.configure`. 
- - """ - return self.get_context().bind # type: ignore[return-value] - - def get_impl(self) -> DefaultImpl: - return self.get_context().impl diff --git a/myenv/Lib/site-packages/alembic/runtime/migration.py b/myenv/Lib/site-packages/alembic/runtime/migration.py deleted file mode 100644 index 6cfe5e2..0000000 --- a/myenv/Lib/site-packages/alembic/runtime/migration.py +++ /dev/null @@ -1,1396 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -from contextlib import contextmanager -from contextlib import nullcontext -import logging -import sys -from typing import Any -from typing import Callable -from typing import cast -from typing import Collection -from typing import ContextManager -from typing import Dict -from typing import Iterable -from typing import Iterator -from typing import List -from typing import Optional -from typing import Set -from typing import Tuple -from typing import TYPE_CHECKING -from typing import Union - -from sqlalchemy import Column -from sqlalchemy import literal_column -from sqlalchemy import MetaData -from sqlalchemy import PrimaryKeyConstraint -from sqlalchemy import String -from sqlalchemy import Table -from sqlalchemy.engine import Engine -from sqlalchemy.engine import url as sqla_url -from sqlalchemy.engine.strategies import MockEngineStrategy - -from .. import ddl -from .. import util -from ..util import sqla_compat -from ..util.compat import EncodedIO - -if TYPE_CHECKING: - from sqlalchemy.engine import Dialect - from sqlalchemy.engine import URL - from sqlalchemy.engine.base import Connection - from sqlalchemy.engine.base import Transaction - from sqlalchemy.engine.mock import MockConnection - from sqlalchemy.sql import Executable - - from .environment import EnvironmentContext - from ..config import Config - from ..script.base import Script - from ..script.base import ScriptDirectory - from ..script.revision import _RevisionOrBase - from ..script.revision import Revision - from ..script.revision import RevisionMap - -log = logging.getLogger(__name__) - - -class _ProxyTransaction: - def __init__(self, migration_context: MigrationContext) -> None: - self.migration_context = migration_context - - @property - def _proxied_transaction(self) -> Optional[Transaction]: - return self.migration_context._transaction - - def rollback(self) -> None: - t = self._proxied_transaction - assert t is not None - t.rollback() - self.migration_context._transaction = None - - def commit(self) -> None: - t = self._proxied_transaction - assert t is not None - t.commit() - self.migration_context._transaction = None - - def __enter__(self) -> _ProxyTransaction: - return self - - def __exit__(self, type_: Any, value: Any, traceback: Any) -> None: - if self._proxied_transaction is not None: - self._proxied_transaction.__exit__(type_, value, traceback) - self.migration_context._transaction = None - - -class MigrationContext: - """Represent the database state made available to a migration - script. - - :class:`.MigrationContext` is the front end to an actual - database connection, or alternatively a string output - stream given a particular database dialect, - from an Alembic perspective. 
- - When inside the ``env.py`` script, the :class:`.MigrationContext` - is available via the - :meth:`.EnvironmentContext.get_context` method, - which is available at ``alembic.context``:: - - # from within env.py script - from alembic import context - - migration_context = context.get_context() - - For usage outside of an ``env.py`` script, such as for - utility routines that want to check the current version - in the database, the :meth:`.MigrationContext.configure` - method to create new :class:`.MigrationContext` objects. - For example, to get at the current revision in the - database using :meth:`.MigrationContext.get_current_revision`:: - - # in any application, outside of an env.py script - from alembic.migration import MigrationContext - from sqlalchemy import create_engine - - engine = create_engine("postgresql://mydatabase") - conn = engine.connect() - - context = MigrationContext.configure(conn) - current_rev = context.get_current_revision() - - The above context can also be used to produce - Alembic migration operations with an :class:`.Operations` - instance:: - - # in any application, outside of the normal Alembic environment - from alembic.operations import Operations - - op = Operations(context) - op.alter_column("mytable", "somecolumn", nullable=True) - - """ - - def __init__( - self, - dialect: Dialect, - connection: Optional[Connection], - opts: Dict[str, Any], - environment_context: Optional[EnvironmentContext] = None, - ) -> None: - self.environment_context = environment_context - self.opts = opts - self.dialect = dialect - self.script: Optional[ScriptDirectory] = opts.get("script") - as_sql: bool = opts.get("as_sql", False) - transactional_ddl = opts.get("transactional_ddl") - self._transaction_per_migration = opts.get( - "transaction_per_migration", False - ) - self.on_version_apply_callbacks = opts.get("on_version_apply", ()) - self._transaction: Optional[Transaction] = None - - if as_sql: - self.connection = cast( - Optional["Connection"], self._stdout_connection(connection) - ) - assert self.connection is not None - self._in_external_transaction = False - else: - self.connection = connection - self._in_external_transaction = ( - sqla_compat._get_connection_in_transaction(connection) - ) - - self._migrations_fn: Optional[ - Callable[..., Iterable[RevisionStep]] - ] = opts.get("fn") - self.as_sql = as_sql - - self.purge = opts.get("purge", False) - - if "output_encoding" in opts: - self.output_buffer = EncodedIO( - opts.get("output_buffer") - or sys.stdout, # type:ignore[arg-type] - opts["output_encoding"], - ) - else: - self.output_buffer = opts.get("output_buffer", sys.stdout) - - self._user_compare_type = opts.get("compare_type", True) - self._user_compare_server_default = opts.get( - "compare_server_default", False - ) - self.version_table = version_table = opts.get( - "version_table", "alembic_version" - ) - self.version_table_schema = version_table_schema = opts.get( - "version_table_schema", None - ) - self._version = Table( - version_table, - MetaData(), - Column("version_num", String(32), nullable=False), - schema=version_table_schema, - ) - if opts.get("version_table_pk", True): - self._version.append_constraint( - PrimaryKeyConstraint( - "version_num", name="%s_pkc" % version_table - ) - ) - - self._start_from_rev: Optional[str] = opts.get("starting_rev") - self.impl = ddl.DefaultImpl.get_by_dialect(dialect)( - dialect, - self.connection, - self.as_sql, - transactional_ddl, - self.output_buffer, - opts, - ) - log.info("Context impl %s.", 
self.impl.__class__.__name__) - if self.as_sql: - log.info("Generating static SQL") - log.info( - "Will assume %s DDL.", - ( - "transactional" - if self.impl.transactional_ddl - else "non-transactional" - ), - ) - - @classmethod - def configure( - cls, - connection: Optional[Connection] = None, - url: Optional[Union[str, URL]] = None, - dialect_name: Optional[str] = None, - dialect: Optional[Dialect] = None, - environment_context: Optional[EnvironmentContext] = None, - dialect_opts: Optional[Dict[str, str]] = None, - opts: Optional[Any] = None, - ) -> MigrationContext: - """Create a new :class:`.MigrationContext`. - - This is a factory method usually called - by :meth:`.EnvironmentContext.configure`. - - :param connection: a :class:`~sqlalchemy.engine.Connection` - to use for SQL execution in "online" mode. When present, - is also used to determine the type of dialect in use. - :param url: a string database url, or a - :class:`sqlalchemy.engine.url.URL` object. - The type of dialect to be used will be derived from this if - ``connection`` is not passed. - :param dialect_name: string name of a dialect, such as - "postgresql", "mssql", etc. The type of dialect to be used will be - derived from this if ``connection`` and ``url`` are not passed. - :param opts: dictionary of options. Most other options - accepted by :meth:`.EnvironmentContext.configure` are passed via - this dictionary. - - """ - if opts is None: - opts = {} - if dialect_opts is None: - dialect_opts = {} - - if connection: - if isinstance(connection, Engine): - raise util.CommandError( - "'connection' argument to configure() is expected " - "to be a sqlalchemy.engine.Connection instance, " - "got %r" % connection, - ) - - dialect = connection.dialect - elif url: - url_obj = sqla_url.make_url(url) - dialect = url_obj.get_dialect()(**dialect_opts) - elif dialect_name: - url_obj = sqla_url.make_url("%s://" % dialect_name) - dialect = url_obj.get_dialect()(**dialect_opts) - elif not dialect: - raise Exception("Connection, url, or dialect_name is required.") - assert dialect is not None - return MigrationContext(dialect, connection, opts, environment_context) - - @contextmanager - def autocommit_block(self) -> Iterator[None]: - """Enter an "autocommit" block, for databases that support AUTOCOMMIT - isolation levels. - - This special directive is intended to support the occasional database - DDL or system operation that specifically has to be run outside of - any kind of transaction block. The PostgreSQL database platform - is the most common target for this style of operation, as many - of its DDL operations must be run outside of transaction blocks, even - though the database overall supports transactional DDL. - - The method is used as a context manager within a migration script, by - calling on :meth:`.Operations.get_context` to retrieve the - :class:`.MigrationContext`, then invoking - :meth:`.MigrationContext.autocommit_block` using the ``with:`` - statement:: - - def upgrade(): - with op.get_context().autocommit_block(): - op.execute("ALTER TYPE mood ADD VALUE 'soso'") - - Above, a PostgreSQL "ALTER TYPE..ADD VALUE" directive is emitted, - which must be run outside of a transaction block at the database level. - The :meth:`.MigrationContext.autocommit_block` method makes use of the - SQLAlchemy ``AUTOCOMMIT`` isolation level setting, which against the - psycogp2 DBAPI corresponds to the ``connection.autocommit`` setting, - to ensure that the database driver is not inside of a DBAPI level - transaction block. - - .. 
warning:: - - As is necessary, **the database transaction preceding the block is - unconditionally committed**. This means that the run of migrations - preceding the operation will be committed, before the overall - migration operation is complete. - - It is recommended that when an application includes migrations with - "autocommit" blocks, that - :paramref:`.EnvironmentContext.transaction_per_migration` be used - so that the calling environment is tuned to expect short per-file - migrations whether or not one of them has an autocommit block. - - - """ - _in_connection_transaction = self._in_connection_transaction() - - if self.impl.transactional_ddl and self.as_sql: - self.impl.emit_commit() - - elif _in_connection_transaction: - assert self._transaction is not None - - self._transaction.commit() - self._transaction = None - - if not self.as_sql: - assert self.connection is not None - current_level = self.connection.get_isolation_level() - base_connection = self.connection - - # in 1.3 and 1.4 non-future mode, the connection gets switched - # out. we can use the base connection with the new mode - # except that it will not know it's in "autocommit" and will - # emit deprecation warnings when an autocommit action takes - # place. - self.connection = self.impl.connection = ( - base_connection.execution_options(isolation_level="AUTOCOMMIT") - ) - - # sqlalchemy future mode will "autobegin" in any case, so take - # control of that "transaction" here - fake_trans: Optional[Transaction] = self.connection.begin() - else: - fake_trans = None - try: - yield - finally: - if not self.as_sql: - assert self.connection is not None - if fake_trans is not None: - fake_trans.commit() - self.connection.execution_options( - isolation_level=current_level - ) - self.connection = self.impl.connection = base_connection - - if self.impl.transactional_ddl and self.as_sql: - self.impl.emit_begin() - - elif _in_connection_transaction: - assert self.connection is not None - self._transaction = self.connection.begin() - - def begin_transaction( - self, _per_migration: bool = False - ) -> Union[_ProxyTransaction, ContextManager[None]]: - """Begin a logical transaction for migration operations. - - This method is used within an ``env.py`` script to demarcate where - the outer "transaction" for a series of migrations begins. Example:: - - def run_migrations_online(): - connectable = create_engine(...) - - with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) - - with context.begin_transaction(): - context.run_migrations() - - Above, :meth:`.MigrationContext.begin_transaction` is used to demarcate - where the outer logical transaction occurs around the - :meth:`.MigrationContext.run_migrations` operation. - - A "Logical" transaction means that the operation may or may not - correspond to a real database transaction. If the target database - supports transactional DDL (or - :paramref:`.EnvironmentContext.configure.transactional_ddl` is true), - the :paramref:`.EnvironmentContext.configure.transaction_per_migration` - flag is not set, and the migration is against a real database - connection (as opposed to using "offline" ``--sql`` mode), a real - transaction will be started. If ``--sql`` mode is in effect, the - operation would instead correspond to a string such as "BEGIN" being - emitted to the string output. - - The returned object is a Python context manager that should only be - used in the context of a ``with:`` statement as indicated above. 
- The object has no other guaranteed API features present. - - .. seealso:: - - :meth:`.MigrationContext.autocommit_block` - - """ - - if self._in_external_transaction: - return nullcontext() - - if self.impl.transactional_ddl: - transaction_now = _per_migration == self._transaction_per_migration - else: - transaction_now = _per_migration is True - - if not transaction_now: - return nullcontext() - - elif not self.impl.transactional_ddl: - assert _per_migration - - if self.as_sql: - return nullcontext() - else: - # track our own notion of a "transaction block", which must be - # committed when complete. Don't rely upon whether or not the - # SQLAlchemy connection reports as "in transaction"; this - # because SQLAlchemy future connection features autobegin - # behavior, so it may already be in a transaction from our - # emitting of queries like "has_version_table", etc. While we - # could track these operations as well, that leaves open the - # possibility of new operations or other things happening in - # the user environment that still may be triggering - # "autobegin". - - in_transaction = self._transaction is not None - - if in_transaction: - return nullcontext() - else: - assert self.connection is not None - self._transaction = ( - sqla_compat._safe_begin_connection_transaction( - self.connection - ) - ) - return _ProxyTransaction(self) - elif self.as_sql: - - @contextmanager - def begin_commit(): - self.impl.emit_begin() - yield - self.impl.emit_commit() - - return begin_commit() - else: - assert self.connection is not None - self._transaction = sqla_compat._safe_begin_connection_transaction( - self.connection - ) - return _ProxyTransaction(self) - - def get_current_revision(self) -> Optional[str]: - """Return the current revision, usually that which is present - in the ``alembic_version`` table in the database. - - This method intends to be used only for a migration stream that - does not contain unmerged branches in the target database; - if there are multiple branches present, an exception is raised. - The :meth:`.MigrationContext.get_current_heads` should be preferred - over this method going forward in order to be compatible with - branch migration support. - - If this :class:`.MigrationContext` was configured in "offline" - mode, that is with ``as_sql=True``, the ``starting_rev`` - parameter is returned instead, if any. - - """ - heads = self.get_current_heads() - if len(heads) == 0: - return None - elif len(heads) > 1: - raise util.CommandError( - "Version table '%s' has more than one head present; " - "please use get_current_heads()" % self.version_table - ) - else: - return heads[0] - - def get_current_heads(self) -> Tuple[str, ...]: - """Return a tuple of the current 'head versions' that are represented - in the target database. - - For a migration stream without branches, this will be a single - value, synonymous with that of - :meth:`.MigrationContext.get_current_revision`. However when multiple - unmerged branches exist within the target database, the returned tuple - will contain a value for each head. - - If this :class:`.MigrationContext` was configured in "offline" - mode, that is with ``as_sql=True``, the ``starting_rev`` - parameter is returned in a one-length tuple. - - If no version table is present, or if there are no revisions - present, an empty tuple is returned. 
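A short sketch of the branch-safe pattern the docstring above recommends, used outside of ``env.py``; the database URL is a placeholder::

    from sqlalchemy import create_engine

    from alembic.migration import MigrationContext

    engine = create_engine("postgresql://scott:tiger@localhost/test")
    with engine.connect() as conn:
        ctx = MigrationContext.configure(conn)
        heads = ctx.get_current_heads()
        if len(heads) > 1:
            print("unmerged branches present:", heads)
        else:
            print("current revision:", heads[0] if heads else None)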
- - """ - if self.as_sql: - start_from_rev: Any = self._start_from_rev - if start_from_rev == "base": - start_from_rev = None - elif start_from_rev is not None and self.script: - start_from_rev = [ - self.script.get_revision(sfr).revision - for sfr in util.to_list(start_from_rev) - if sfr not in (None, "base") - ] - return util.to_tuple(start_from_rev, default=()) - else: - if self._start_from_rev: - raise util.CommandError( - "Can't specify current_rev to context " - "when using a database connection" - ) - if not self._has_version_table(): - return () - assert self.connection is not None - return tuple( - row[0] for row in self.connection.execute(self._version.select()) - ) - - def _ensure_version_table(self, purge: bool = False) -> None: - with sqla_compat._ensure_scope_for_ddl(self.connection): - assert self.connection is not None - self._version.create(self.connection, checkfirst=True) - if purge: - assert self.connection is not None - self.connection.execute(self._version.delete()) - - def _has_version_table(self) -> bool: - assert self.connection is not None - return sqla_compat._connectable_has_table( - self.connection, self.version_table, self.version_table_schema - ) - - def stamp(self, script_directory: ScriptDirectory, revision: str) -> None: - """Stamp the version table with a specific revision. - - This method calculates those branches to which the given revision - can apply, and updates those branches as though they were migrated - towards that revision (either up or down). If no current branches - include the revision, it is added as a new branch head. - - """ - heads = self.get_current_heads() - if not self.as_sql and not heads: - self._ensure_version_table() - head_maintainer = HeadMaintainer(self, heads) - for step in script_directory._stamp_revs(revision, heads): - head_maintainer.update_to_step(step) - - def run_migrations(self, **kw: Any) -> None: - r"""Run the migration scripts established for this - :class:`.MigrationContext`, if any. - - The commands in :mod:`alembic.command` will set up a function - that is ultimately passed to the :class:`.MigrationContext` - as the ``fn`` argument. This function represents the "work" - that will be done when :meth:`.MigrationContext.run_migrations` - is called, typically from within the ``env.py`` script of the - migration environment. The "work function" then provides an iterable - of version callables and other version information which - in the case of the ``upgrade`` or ``downgrade`` commands are the - list of version scripts to invoke. Other commands yield nothing, - in the case that a command wants to run some other operation - against the database such as the ``current`` or ``stamp`` commands. - - :param \**kw: keyword arguments here will be passed to each - migration callable, that is the ``upgrade()`` or ``downgrade()`` - method within revision scripts. - - """ - self.impl.start_migrations() - - heads: Tuple[str, ...] 
- if self.purge: - if self.as_sql: - raise util.CommandError("Can't use --purge with --sql mode") - self._ensure_version_table(purge=True) - heads = () - else: - heads = self.get_current_heads() - - dont_mutate = self.opts.get("dont_mutate", False) - - if not self.as_sql and not heads and not dont_mutate: - self._ensure_version_table() - - head_maintainer = HeadMaintainer(self, heads) - - assert self._migrations_fn is not None - for step in self._migrations_fn(heads, self): - with self.begin_transaction(_per_migration=True): - if self.as_sql and not head_maintainer.heads: - # for offline mode, include a CREATE TABLE from - # the base - assert self.connection is not None - self._version.create(self.connection) - log.info("Running %s", step) - if self.as_sql: - self.impl.static_output( - "-- Running %s" % (step.short_log,) - ) - step.migration_fn(**kw) - - # previously, we wouldn't stamp per migration - # if we were in a transaction, however given the more - # complex model that involves any number of inserts - # and row-targeted updates and deletes, it's simpler for now - # just to run the operations on every version - head_maintainer.update_to_step(step) - for callback in self.on_version_apply_callbacks: - callback( - ctx=self, - step=step.info, - heads=set(head_maintainer.heads), - run_args=kw, - ) - - if self.as_sql and not head_maintainer.heads: - assert self.connection is not None - self._version.drop(self.connection) - - def _in_connection_transaction(self) -> bool: - try: - meth = self.connection.in_transaction # type:ignore[union-attr] - except AttributeError: - return False - else: - return meth() - - def execute( - self, - sql: Union[Executable, str], - execution_options: Optional[Dict[str, Any]] = None, - ) -> None: - """Execute a SQL construct or string statement. - - The underlying execution mechanics are used, that is - if this is "offline mode" the SQL is written to the - output buffer, otherwise the SQL is emitted on - the current SQLAlchemy connection. - - """ - self.impl._exec(sql, execution_options) - - def _stdout_connection( - self, connection: Optional[Connection] - ) -> MockConnection: - def dump(construct, *multiparams, **params): - self.impl._exec(construct) - - return MockEngineStrategy.MockConnection(self.dialect, dump) - - @property - def bind(self) -> Optional[Connection]: - """Return the current "bind". - - In online mode, this is an instance of - :class:`sqlalchemy.engine.Connection`, and is suitable - for ad-hoc execution of any kind of usage described - in SQLAlchemy Core documentation as well as - for usage with the :meth:`sqlalchemy.schema.Table.create` - and :meth:`sqlalchemy.schema.MetaData.create_all` methods - of :class:`~sqlalchemy.schema.Table`, - :class:`~sqlalchemy.schema.MetaData`. - - Note that when "standard output" mode is enabled, - this bind will be a "mock" connection handler that cannot - return results and is only appropriate for a very limited - subset of commands. 
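The "mock" bind described above can be observed by configuring a :class:`.MigrationContext` in offline mode. A sketch, assuming only SQLAlchemy's bundled ``sqlite`` dialect; ``as_sql`` and ``output_buffer`` are among the options accepted via the ``opts`` dictionary::

    import io

    from alembic.migration import MigrationContext

    buf = io.StringIO()
    ctx = MigrationContext.configure(
        dialect_name="sqlite",
        opts={"as_sql": True, "output_buffer": buf},
    )
    ctx.execute("ALTER TABLE account ADD COLUMN notes TEXT")
    print(buf.getvalue())   # the statement rendered to the SQL stream
    print(ctx.bind)         # a MockConnection, not a real Connection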
- - """ - return self.connection - - @property - def config(self) -> Optional[Config]: - """Return the :class:`.Config` used by the current environment, - if any.""" - - if self.environment_context: - return self.environment_context.config - else: - return None - - def _compare_type( - self, inspector_column: Column[Any], metadata_column: Column - ) -> bool: - if self._user_compare_type is False: - return False - - if callable(self._user_compare_type): - user_value = self._user_compare_type( - self, - inspector_column, - metadata_column, - inspector_column.type, - metadata_column.type, - ) - if user_value is not None: - return user_value - - return self.impl.compare_type(inspector_column, metadata_column) - - def _compare_server_default( - self, - inspector_column: Column[Any], - metadata_column: Column[Any], - rendered_metadata_default: Optional[str], - rendered_column_default: Optional[str], - ) -> bool: - if self._user_compare_server_default is False: - return False - - if callable(self._user_compare_server_default): - user_value = self._user_compare_server_default( - self, - inspector_column, - metadata_column, - rendered_column_default, - metadata_column.server_default, - rendered_metadata_default, - ) - if user_value is not None: - return user_value - - return self.impl.compare_server_default( - inspector_column, - metadata_column, - rendered_metadata_default, - rendered_column_default, - ) - - -class HeadMaintainer: - def __init__(self, context: MigrationContext, heads: Any) -> None: - self.context = context - self.heads = set(heads) - - def _insert_version(self, version: str) -> None: - assert version not in self.heads - self.heads.add(version) - - self.context.impl._exec( - self.context._version.insert().values( - version_num=literal_column("'%s'" % version) - ) - ) - - def _delete_version(self, version: str) -> None: - self.heads.remove(version) - - ret = self.context.impl._exec( - self.context._version.delete().where( - self.context._version.c.version_num - == literal_column("'%s'" % version) - ) - ) - - if ( - not self.context.as_sql - and self.context.dialect.supports_sane_rowcount - and ret is not None - and ret.rowcount != 1 - ): - raise util.CommandError( - "Online migration expected to match one " - "row when deleting '%s' in '%s'; " - "%d found" - % (version, self.context.version_table, ret.rowcount) - ) - - def _update_version(self, from_: str, to_: str) -> None: - assert to_ not in self.heads - self.heads.remove(from_) - self.heads.add(to_) - - ret = self.context.impl._exec( - self.context._version.update() - .values(version_num=literal_column("'%s'" % to_)) - .where( - self.context._version.c.version_num - == literal_column("'%s'" % from_) - ) - ) - - if ( - not self.context.as_sql - and self.context.dialect.supports_sane_rowcount - and ret is not None - and ret.rowcount != 1 - ): - raise util.CommandError( - "Online migration expected to match one " - "row when updating '%s' to '%s' in '%s'; " - "%d found" - % (from_, to_, self.context.version_table, ret.rowcount) - ) - - def update_to_step(self, step: Union[RevisionStep, StampStep]) -> None: - if step.should_delete_branch(self.heads): - vers = step.delete_version_num - log.debug("branch delete %s", vers) - self._delete_version(vers) - elif step.should_create_branch(self.heads): - vers = step.insert_version_num - log.debug("new branch insert %s", vers) - self._insert_version(vers) - elif step.should_merge_branches(self.heads): - # delete revs, update from rev, update to rev - ( - delete_revs, - update_from_rev, - 
update_to_rev, - ) = step.merge_branch_idents(self.heads) - log.debug( - "merge, delete %s, update %s to %s", - delete_revs, - update_from_rev, - update_to_rev, - ) - for delrev in delete_revs: - self._delete_version(delrev) - self._update_version(update_from_rev, update_to_rev) - elif step.should_unmerge_branches(self.heads): - ( - update_from_rev, - update_to_rev, - insert_revs, - ) = step.unmerge_branch_idents(self.heads) - log.debug( - "unmerge, insert %s, update %s to %s", - insert_revs, - update_from_rev, - update_to_rev, - ) - for insrev in insert_revs: - self._insert_version(insrev) - self._update_version(update_from_rev, update_to_rev) - else: - from_, to_ = step.update_version_num(self.heads) - log.debug("update %s to %s", from_, to_) - self._update_version(from_, to_) - - -class MigrationInfo: - """Exposes information about a migration step to a callback listener. - - The :class:`.MigrationInfo` object is available exclusively for the - benefit of the :paramref:`.EnvironmentContext.on_version_apply` - callback hook. - - """ - - is_upgrade: bool - """True/False: indicates whether this operation ascends or descends the - version tree.""" - - is_stamp: bool - """True/False: indicates whether this operation is a stamp (i.e. whether - it results in any actual database operations).""" - - up_revision_id: Optional[str] - """Version string corresponding to :attr:`.Revision.revision`. - - In the case of a stamp operation, it is advised to use the - :attr:`.MigrationInfo.up_revision_ids` tuple as a stamp operation can - make a single movement from one or more branches down to a single - branchpoint, in which case there will be multiple "up" revisions. - - .. seealso:: - - :attr:`.MigrationInfo.up_revision_ids` - - """ - - up_revision_ids: Tuple[str, ...] - """Tuple of version strings corresponding to :attr:`.Revision.revision`. - - In the majority of cases, this tuple will be a single value, synonymous - with the scalar value of :attr:`.MigrationInfo.up_revision_id`. - It can be multiple revision identifiers only in the case of an - ``alembic stamp`` operation which is moving downwards from multiple - branches down to their common branch point. - - """ - - down_revision_ids: Tuple[str, ...] - """Tuple of strings representing the base revisions of this migration step. - - If empty, this represents a root revision; otherwise, the first item - corresponds to :attr:`.Revision.down_revision`, and the rest are inferred - from dependencies. - """ - - revision_map: RevisionMap - """The revision map inside of which this operation occurs.""" - - def __init__( - self, - revision_map: RevisionMap, - is_upgrade: bool, - is_stamp: bool, - up_revisions: Union[str, Tuple[str, ...]], - down_revisions: Union[str, Tuple[str, ...]], - ) -> None: - self.revision_map = revision_map - self.is_upgrade = is_upgrade - self.is_stamp = is_stamp - self.up_revision_ids = util.to_tuple(up_revisions, default=()) - if self.up_revision_ids: - self.up_revision_id = self.up_revision_ids[0] - else: - # this should never be the case with - # "upgrade", "downgrade", or "stamp" as we are always - # measuring movement in terms of at least one upgrade version - self.up_revision_id = None - self.down_revision_ids = util.to_tuple(down_revisions, default=()) - - @property - def is_migration(self) -> bool: - """True/False: indicates whether this operation is a migration. - - At present this is true if and only the migration is not a stamp. 
- If other operation types are added in the future, both this attribute - and :attr:`~.MigrationInfo.is_stamp` will be false. - """ - return not self.is_stamp - - @property - def source_revision_ids(self) -> Tuple[str, ...]: - """Active revisions before this migration step is applied.""" - return ( - self.down_revision_ids if self.is_upgrade else self.up_revision_ids - ) - - @property - def destination_revision_ids(self) -> Tuple[str, ...]: - """Active revisions after this migration step is applied.""" - return ( - self.up_revision_ids if self.is_upgrade else self.down_revision_ids - ) - - @property - def up_revision(self) -> Optional[Revision]: - """Get :attr:`~.MigrationInfo.up_revision_id` as - a :class:`.Revision`. - - """ - return self.revision_map.get_revision(self.up_revision_id) - - @property - def up_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: - """Get :attr:`~.MigrationInfo.up_revision_ids` as a - :class:`.Revision`.""" - return self.revision_map.get_revisions(self.up_revision_ids) - - @property - def down_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: - """Get :attr:`~.MigrationInfo.down_revision_ids` as a tuple of - :class:`Revisions <.Revision>`.""" - return self.revision_map.get_revisions(self.down_revision_ids) - - @property - def source_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: - """Get :attr:`~MigrationInfo.source_revision_ids` as a tuple of - :class:`Revisions <.Revision>`.""" - return self.revision_map.get_revisions(self.source_revision_ids) - - @property - def destination_revisions(self) -> Tuple[Optional[_RevisionOrBase], ...]: - """Get :attr:`~MigrationInfo.destination_revision_ids` as a tuple of - :class:`Revisions <.Revision>`.""" - return self.revision_map.get_revisions(self.destination_revision_ids) - - -class MigrationStep: - from_revisions_no_deps: Tuple[str, ...] - to_revisions_no_deps: Tuple[str, ...] - is_upgrade: bool - migration_fn: Any - - if TYPE_CHECKING: - - @property - def doc(self) -> Optional[str]: ... 
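The :class:`.MigrationInfo` attributes documented above are exactly what an ``on_version_apply`` callback receives as its ``step`` argument; a hypothetical auditing callback::

    def audit_version_apply(*, ctx, step, heads, run_args):
        # ``step`` is a MigrationInfo instance
        if step.is_migration:
            direction = "upgrade" if step.is_upgrade else "downgrade"
            print(direction, step.source_revision_ids,
                  "->", step.destination_revision_ids)
        else:
            print("stamp to", step.up_revision_ids)

    # wired in env.py:
    # context.configure(..., on_version_apply=[audit_version_apply])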
- - @property - def name(self) -> str: - return self.migration_fn.__name__ - - @classmethod - def upgrade_from_script( - cls, revision_map: RevisionMap, script: Script - ) -> RevisionStep: - return RevisionStep(revision_map, script, True) - - @classmethod - def downgrade_from_script( - cls, revision_map: RevisionMap, script: Script - ) -> RevisionStep: - return RevisionStep(revision_map, script, False) - - @property - def is_downgrade(self) -> bool: - return not self.is_upgrade - - @property - def short_log(self) -> str: - return "%s %s -> %s" % ( - self.name, - util.format_as_comma(self.from_revisions_no_deps), - util.format_as_comma(self.to_revisions_no_deps), - ) - - def __str__(self): - if self.doc: - return "%s %s -> %s, %s" % ( - self.name, - util.format_as_comma(self.from_revisions_no_deps), - util.format_as_comma(self.to_revisions_no_deps), - self.doc, - ) - else: - return self.short_log - - -class RevisionStep(MigrationStep): - def __init__( - self, revision_map: RevisionMap, revision: Script, is_upgrade: bool - ) -> None: - self.revision_map = revision_map - self.revision = revision - self.is_upgrade = is_upgrade - if is_upgrade: - self.migration_fn = revision.module.upgrade - else: - self.migration_fn = revision.module.downgrade - - def __repr__(self): - return "RevisionStep(%r, is_upgrade=%r)" % ( - self.revision.revision, - self.is_upgrade, - ) - - def __eq__(self, other: object) -> bool: - return ( - isinstance(other, RevisionStep) - and other.revision == self.revision - and self.is_upgrade == other.is_upgrade - ) - - @property - def doc(self) -> Optional[str]: - return self.revision.doc - - @property - def from_revisions(self) -> Tuple[str, ...]: - if self.is_upgrade: - return self.revision._normalized_down_revisions - else: - return (self.revision.revision,) - - @property - def from_revisions_no_deps( # type:ignore[override] - self, - ) -> Tuple[str, ...]: - if self.is_upgrade: - return self.revision._versioned_down_revisions - else: - return (self.revision.revision,) - - @property - def to_revisions(self) -> Tuple[str, ...]: - if self.is_upgrade: - return (self.revision.revision,) - else: - return self.revision._normalized_down_revisions - - @property - def to_revisions_no_deps( # type:ignore[override] - self, - ) -> Tuple[str, ...]: - if self.is_upgrade: - return (self.revision.revision,) - else: - return self.revision._versioned_down_revisions - - @property - def _has_scalar_down_revision(self) -> bool: - return len(self.revision._normalized_down_revisions) == 1 - - def should_delete_branch(self, heads: Set[str]) -> bool: - """A delete is when we are a. in a downgrade and b. - we are going to the "base" or we are going to a version that - is implied as a dependency on another version that is remaining. - - """ - if not self.is_downgrade: - return False - - if self.revision.revision not in heads: - return False - - downrevs = self.revision._normalized_down_revisions - - if not downrevs: - # is a base - return True - else: - # determine what the ultimate "to_revisions" for an - # unmerge would be. If there are none, then we're a delete. 
- to_revisions = self._unmerge_to_revisions(heads) - return not to_revisions - - def merge_branch_idents( - self, heads: Set[str] - ) -> Tuple[List[str], str, str]: - other_heads = set(heads).difference(self.from_revisions) - - if other_heads: - ancestors = { - r.revision - for r in self.revision_map._get_ancestor_nodes( - self.revision_map.get_revisions(other_heads), check=False - ) - } - from_revisions = list( - set(self.from_revisions).difference(ancestors) - ) - else: - from_revisions = list(self.from_revisions) - - return ( - # delete revs, update from rev, update to rev - list(from_revisions[0:-1]), - from_revisions[-1], - self.to_revisions[0], - ) - - def _unmerge_to_revisions(self, heads: Set[str]) -> Tuple[str, ...]: - other_heads = set(heads).difference([self.revision.revision]) - if other_heads: - ancestors = { - r.revision - for r in self.revision_map._get_ancestor_nodes( - self.revision_map.get_revisions(other_heads), check=False - ) - } - return tuple(set(self.to_revisions).difference(ancestors)) - else: - # for each revision we plan to return, compute its ancestors - # (excluding self), and remove those from the final output since - # they are already accounted for. - ancestors = { - r.revision - for to_revision in self.to_revisions - for r in self.revision_map._get_ancestor_nodes( - self.revision_map.get_revisions(to_revision), check=False - ) - if r.revision != to_revision - } - return tuple(set(self.to_revisions).difference(ancestors)) - - def unmerge_branch_idents( - self, heads: Set[str] - ) -> Tuple[str, str, Tuple[str, ...]]: - to_revisions = self._unmerge_to_revisions(heads) - - return ( - # update from rev, update to rev, insert revs - self.from_revisions[0], - to_revisions[-1], - to_revisions[0:-1], - ) - - def should_create_branch(self, heads: Set[str]) -> bool: - if not self.is_upgrade: - return False - - downrevs = self.revision._normalized_down_revisions - - if not downrevs: - # is a base - return True - else: - # none of our downrevs are present, so... - # we have to insert our version. This is true whether - # or not there is only one downrev, or multiple (in the latter - # case, we're a merge point.) 
- if not heads.intersection(downrevs): - return True - else: - return False - - def should_merge_branches(self, heads: Set[str]) -> bool: - if not self.is_upgrade: - return False - - downrevs = self.revision._normalized_down_revisions - - if len(downrevs) > 1 and len(heads.intersection(downrevs)) > 1: - return True - - return False - - def should_unmerge_branches(self, heads: Set[str]) -> bool: - if not self.is_downgrade: - return False - - downrevs = self.revision._normalized_down_revisions - - if self.revision.revision in heads and len(downrevs) > 1: - return True - - return False - - def update_version_num(self, heads: Set[str]) -> Tuple[str, str]: - if not self._has_scalar_down_revision: - downrev = heads.intersection( - self.revision._normalized_down_revisions - ) - assert ( - len(downrev) == 1 - ), "Can't do an UPDATE because downrevision is ambiguous" - down_revision = list(downrev)[0] - else: - down_revision = self.revision._normalized_down_revisions[0] - - if self.is_upgrade: - return down_revision, self.revision.revision - else: - return self.revision.revision, down_revision - - @property - def delete_version_num(self) -> str: - return self.revision.revision - - @property - def insert_version_num(self) -> str: - return self.revision.revision - - @property - def info(self) -> MigrationInfo: - return MigrationInfo( - revision_map=self.revision_map, - up_revisions=self.revision.revision, - down_revisions=self.revision._normalized_down_revisions, - is_upgrade=self.is_upgrade, - is_stamp=False, - ) - - -class StampStep(MigrationStep): - def __init__( - self, - from_: Optional[Union[str, Collection[str]]], - to_: Optional[Union[str, Collection[str]]], - is_upgrade: bool, - branch_move: bool, - revision_map: Optional[RevisionMap] = None, - ) -> None: - self.from_: Tuple[str, ...] = util.to_tuple(from_, default=()) - self.to_: Tuple[str, ...] 
= util.to_tuple(to_, default=()) - self.is_upgrade = is_upgrade - self.branch_move = branch_move - self.migration_fn = self.stamp_revision - self.revision_map = revision_map - - doc: Optional[str] = None - - def stamp_revision(self, **kw: Any) -> None: - return None - - def __eq__(self, other): - return ( - isinstance(other, StampStep) - and other.from_revisions == self.from_revisions - and other.to_revisions == self.to_revisions - and other.branch_move == self.branch_move - and self.is_upgrade == other.is_upgrade - ) - - @property - def from_revisions(self): - return self.from_ - - @property - def to_revisions(self) -> Tuple[str, ...]: - return self.to_ - - @property - def from_revisions_no_deps( # type:ignore[override] - self, - ) -> Tuple[str, ...]: - return self.from_ - - @property - def to_revisions_no_deps( # type:ignore[override] - self, - ) -> Tuple[str, ...]: - return self.to_ - - @property - def delete_version_num(self) -> str: - assert len(self.from_) == 1 - return self.from_[0] - - @property - def insert_version_num(self) -> str: - assert len(self.to_) == 1 - return self.to_[0] - - def update_version_num(self, heads: Set[str]) -> Tuple[str, str]: - assert len(self.from_) == 1 - assert len(self.to_) == 1 - return self.from_[0], self.to_[0] - - def merge_branch_idents( - self, heads: Union[Set[str], List[str]] - ) -> Union[Tuple[List[Any], str, str], Tuple[List[str], str, str]]: - return ( - # delete revs, update from rev, update to rev - list(self.from_[0:-1]), - self.from_[-1], - self.to_[0], - ) - - def unmerge_branch_idents( - self, heads: Set[str] - ) -> Tuple[str, str, List[str]]: - return ( - # update from rev, update to rev, insert revs - self.from_[0], - self.to_[-1], - list(self.to_[0:-1]), - ) - - def should_delete_branch(self, heads: Set[str]) -> bool: - # TODO: we probably need to look for self.to_ inside of heads, - # in a similar manner as should_create_branch, however we have - # no tests for this yet (stamp downgrades w/ branches) - return self.is_downgrade and self.branch_move - - def should_create_branch(self, heads: Set[str]) -> Union[Set[str], bool]: - return ( - self.is_upgrade - and (self.branch_move or set(self.from_).difference(heads)) - and set(self.to_).difference(heads) - ) - - def should_merge_branches(self, heads: Set[str]) -> bool: - return len(self.from_) > 1 - - def should_unmerge_branches(self, heads: Set[str]) -> bool: - return len(self.to_) > 1 - - @property - def info(self) -> MigrationInfo: - up, down = ( - (self.to_, self.from_) - if self.is_upgrade - else (self.from_, self.to_) - ) - assert self.revision_map is not None - return MigrationInfo( - revision_map=self.revision_map, - up_revisions=up, - down_revisions=down, - is_upgrade=self.is_upgrade, - is_stamp=True, - ) diff --git a/myenv/Lib/site-packages/alembic/script/__init__.py b/myenv/Lib/site-packages/alembic/script/__init__.py deleted file mode 100644 index d78f3f1..0000000 --- a/myenv/Lib/site-packages/alembic/script/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .base import Script -from .base import ScriptDirectory - -__all__ = ["ScriptDirectory", "Script"] diff --git a/myenv/Lib/site-packages/alembic/script/base.py b/myenv/Lib/site-packages/alembic/script/base.py deleted file mode 100644 index 30df6dd..0000000 --- a/myenv/Lib/site-packages/alembic/script/base.py +++ /dev/null @@ -1,1066 +0,0 @@ -from __future__ import annotations - -from contextlib import contextmanager -import datetime -import os -import re -import shutil -import sys -from types import ModuleType -from typing 
import Any -from typing import cast -from typing import Iterator -from typing import List -from typing import Mapping -from typing import Optional -from typing import Sequence -from typing import Set -from typing import Tuple -from typing import TYPE_CHECKING -from typing import Union - -from . import revision -from . import write_hooks -from .. import util -from ..runtime import migration -from ..util import compat -from ..util import not_none - -if TYPE_CHECKING: - from .revision import _GetRevArg - from .revision import _RevIdType - from .revision import Revision - from ..config import Config - from ..config import MessagingOptions - from ..runtime.migration import RevisionStep - from ..runtime.migration import StampStep - -try: - if compat.py39: - from zoneinfo import ZoneInfo - from zoneinfo import ZoneInfoNotFoundError - else: - from backports.zoneinfo import ZoneInfo # type: ignore[import-not-found,no-redef] # noqa: E501 - from backports.zoneinfo import ZoneInfoNotFoundError # type: ignore[no-redef] # noqa: E501 -except ImportError: - ZoneInfo = None # type: ignore[assignment, misc] - -_sourceless_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)(c|o)?$") -_only_source_rev_file = re.compile(r"(?!\.\#|__init__)(.*\.py)$") -_legacy_rev = re.compile(r"([a-f0-9]+)\.py$") -_slug_re = re.compile(r"\w+") -_default_file_template = "%(rev)s_%(slug)s" -_split_on_space_comma = re.compile(r", *|(?: +)") - -_split_on_space_comma_colon = re.compile(r", *|(?: +)|\:") - - -class ScriptDirectory: - """Provides operations upon an Alembic script directory. - - This object is useful to get information as to current revisions, - most notably being able to get at the "head" revision, for schemes - that want to test if the current revision in the database is the most - recent:: - - from alembic.script import ScriptDirectory - from alembic.config import Config - config = Config() - config.set_main_option("script_location", "myapp:migrations") - script = ScriptDirectory.from_config(config) - - head_revision = script.get_current_head() - - - - """ - - def __init__( - self, - dir: str, # noqa - file_template: str = _default_file_template, - truncate_slug_length: Optional[int] = 40, - version_locations: Optional[List[str]] = None, - sourceless: bool = False, - output_encoding: str = "utf-8", - timezone: Optional[str] = None, - hook_config: Optional[Mapping[str, str]] = None, - recursive_version_locations: bool = False, - messaging_opts: MessagingOptions = cast( - "MessagingOptions", util.EMPTY_DICT - ), - ) -> None: - self.dir = dir - self.file_template = file_template - self.version_locations = version_locations - self.truncate_slug_length = truncate_slug_length or 40 - self.sourceless = sourceless - self.output_encoding = output_encoding - self.revision_map = revision.RevisionMap(self._load_revisions) - self.timezone = timezone - self.hook_config = hook_config - self.recursive_version_locations = recursive_version_locations - self.messaging_opts = messaging_opts - - if not os.access(dir, os.F_OK): - raise util.CommandError( - "Path doesn't exist: %r. Please use " - "the 'init' command to create a new " - "scripts folder." 
% os.path.abspath(dir) - ) - - @property - def versions(self) -> str: - loc = self._version_locations - if len(loc) > 1: - raise util.CommandError("Multiple version_locations present") - else: - return loc[0] - - @util.memoized_property - def _version_locations(self) -> Sequence[str]: - if self.version_locations: - return [ - os.path.abspath(util.coerce_resource_to_filename(location)) - for location in self.version_locations - ] - else: - return (os.path.abspath(os.path.join(self.dir, "versions")),) - - def _load_revisions(self) -> Iterator[Script]: - if self.version_locations: - paths = [ - vers - for vers in self._version_locations - if os.path.exists(vers) - ] - else: - paths = [self.versions] - - dupes = set() - for vers in paths: - for file_path in Script._list_py_dir(self, vers): - real_path = os.path.realpath(file_path) - if real_path in dupes: - util.warn( - "File %s loaded twice! ignoring. Please ensure " - "version_locations is unique." % real_path - ) - continue - dupes.add(real_path) - - filename = os.path.basename(real_path) - dir_name = os.path.dirname(real_path) - script = Script._from_filename(self, dir_name, filename) - if script is None: - continue - yield script - - @classmethod - def from_config(cls, config: Config) -> ScriptDirectory: - """Produce a new :class:`.ScriptDirectory` given a :class:`.Config` - instance. - - The :class:`.Config` need only have the ``script_location`` key - present. - - """ - script_location = config.get_main_option("script_location") - if script_location is None: - raise util.CommandError( - "No 'script_location' key " "found in configuration." - ) - truncate_slug_length: Optional[int] - tsl = config.get_main_option("truncate_slug_length") - if tsl is not None: - truncate_slug_length = int(tsl) - else: - truncate_slug_length = None - - version_locations_str = config.get_main_option("version_locations") - version_locations: Optional[List[str]] - if version_locations_str: - version_path_separator = config.get_main_option( - "version_path_separator" - ) - - split_on_path = { - None: None, - "space": " ", - "newline": "\n", - "os": os.pathsep, - ":": ":", - ";": ";", - } - - try: - split_char: Optional[str] = split_on_path[ - version_path_separator - ] - except KeyError as ke: - raise ValueError( - "'%s' is not a valid value for " - "version_path_separator; " - "expected 'space', 'newline', 'os', ':', ';'" - % version_path_separator - ) from ke - else: - if split_char is None: - # legacy behaviour for backwards compatibility - version_locations = _split_on_space_comma.split( - version_locations_str - ) - else: - version_locations = [ - x.strip() - for x in version_locations_str.split(split_char) - if x - ] - else: - version_locations = None - - prepend_sys_path = config.get_main_option("prepend_sys_path") - if prepend_sys_path: - sys.path[:0] = list( - _split_on_space_comma_colon.split(prepend_sys_path) - ) - - rvl = config.get_main_option("recursive_version_locations") == "true" - return ScriptDirectory( - util.coerce_resource_to_filename(script_location), - file_template=config.get_main_option( - "file_template", _default_file_template - ), - truncate_slug_length=truncate_slug_length, - sourceless=config.get_main_option("sourceless") == "true", - output_encoding=config.get_main_option("output_encoding", "utf-8"), - version_locations=version_locations, - timezone=config.get_main_option("timezone"), - hook_config=config.get_section("post_write_hooks", {}), - recursive_version_locations=rvl, - messaging_opts=config.messaging_opts, - ) - - 
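The options parsed above can be exercised end to end; a minimal sketch, assuming an already-initialized "migrations" directory and using only option names that appear in this method (the paths themselves are hypothetical)::

    from alembic.config import Config
    from alembic.script import ScriptDirectory

    config = Config()
    config.set_main_option("script_location", "migrations")
    # an explicit separator; left unset, the legacy space/comma
    # regex (_split_on_space_comma) applies instead
    config.set_main_option("version_path_separator", ";")
    config.set_main_option(
        "version_locations",
        "migrations/versions;migrations/extra",
    )
    script = ScriptDirectory.from_config(config)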
@contextmanager - def _catch_revision_errors( - self, - ancestor: Optional[str] = None, - multiple_heads: Optional[str] = None, - start: Optional[str] = None, - end: Optional[str] = None, - resolution: Optional[str] = None, - ) -> Iterator[None]: - try: - yield - except revision.RangeNotAncestorError as rna: - if start is None: - start = cast(Any, rna.lower) - if end is None: - end = cast(Any, rna.upper) - if not ancestor: - ancestor = ( - "Requested range %(start)s:%(end)s does not refer to " - "ancestor/descendant revisions along the same branch" - ) - ancestor = ancestor % {"start": start, "end": end} - raise util.CommandError(ancestor) from rna - except revision.MultipleHeads as mh: - if not multiple_heads: - multiple_heads = ( - "Multiple head revisions are present for given " - "argument '%(head_arg)s'; please " - "specify a specific target revision, " - "'@%(head_arg)s' to " - "narrow to a specific head, or 'heads' for all heads" - ) - multiple_heads = multiple_heads % { - "head_arg": end or mh.argument, - "heads": util.format_as_comma(mh.heads), - } - raise util.CommandError(multiple_heads) from mh - except revision.ResolutionError as re: - if resolution is None: - resolution = "Can't locate revision identified by '%s'" % ( - re.argument - ) - raise util.CommandError(resolution) from re - except revision.RevisionError as err: - raise util.CommandError(err.args[0]) from err - - def walk_revisions( - self, base: str = "base", head: str = "heads" - ) -> Iterator[Script]: - """Iterate through all revisions. - - :param base: the base revision, or "base" to start from the - empty revision. - - :param head: the head revision; defaults to "heads" to indicate - all head revisions. May also be "head" to indicate a single - head revision. - - """ - with self._catch_revision_errors(start=base, end=head): - for rev in self.revision_map.iterate_revisions( - head, base, inclusive=True, assert_relative_length=False - ): - yield cast(Script, rev) - - def get_revisions(self, id_: _GetRevArg) -> Tuple[Script, ...]: - """Return the :class:`.Script` instance with the given rev identifier, - symbolic name, or sequence of identifiers. - - """ - with self._catch_revision_errors(): - return cast( - Tuple[Script, ...], - self.revision_map.get_revisions(id_), - ) - - def get_all_current(self, id_: Tuple[str, ...]) -> Set[Script]: - with self._catch_revision_errors(): - return cast(Set[Script], self.revision_map._get_all_current(id_)) - - def get_revision(self, id_: str) -> Script: - """Return the :class:`.Script` instance with the given rev id. - - .. seealso:: - - :meth:`.ScriptDirectory.get_revisions` - - """ - - with self._catch_revision_errors(): - return cast(Script, self.revision_map.get_revision(id_)) - - def as_revision_number( - self, id_: Optional[str] - ) -> Optional[Union[str, Tuple[str, ...]]]: - """Convert a symbolic revision, i.e. 'head' or 'base', into - an actual revision number.""" - - with self._catch_revision_errors(): - rev, branch_name = self.revision_map._resolve_revision_number(id_) - - if not rev: - # convert () to None - return None - elif id_ == "heads": - return rev - else: - return rev[0] - - def iterate_revisions( - self, - upper: Union[str, Tuple[str, ...], None], - lower: Union[str, Tuple[str, ...], None], - **kw: Any, - ) -> Iterator[Script]: - """Iterate through script revisions, starting at the given - upper revision identifier and ending at the lower. 
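Continuing that sketch: ``walk_revisions()`` is the simplest way to traverse everything the directory knows about, newest to oldest; reusing the hypothetical ``script`` object from above::

    for rev in script.walk_revisions(base="base", head="heads"):
        print(rev.revision, rev.doc)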
- - The traversal uses strictly the `down_revision` - marker inside each migration script, so - it is a requirement that upper >= lower, - else you'll get nothing back. - - The iterator yields :class:`.Script` objects. - - .. seealso:: - - :meth:`.RevisionMap.iterate_revisions` - - """ - return cast( - Iterator[Script], - self.revision_map.iterate_revisions(upper, lower, **kw), - ) - - def get_current_head(self) -> Optional[str]: - """Return the current head revision. - - If the script directory has multiple heads - due to branching, an error is raised; - :meth:`.ScriptDirectory.get_heads` should be - preferred. - - :return: a string revision number. - - .. seealso:: - - :meth:`.ScriptDirectory.get_heads` - - """ - with self._catch_revision_errors( - multiple_heads=( - "The script directory has multiple heads (due to branching)." - "Please use get_heads(), or merge the branches using " - "alembic merge." - ) - ): - return self.revision_map.get_current_head() - - def get_heads(self) -> List[str]: - """Return all "versioned head" revisions as strings. - - This is normally a list of length one, - unless branches are present. The - :meth:`.ScriptDirectory.get_current_head()` method - can be used normally when a script directory - has only one head. - - :return: a tuple of string revision numbers. - """ - return list(self.revision_map.heads) - - def get_base(self) -> Optional[str]: - """Return the "base" revision as a string. - - This is the revision number of the script that - has a ``down_revision`` of None. - - If the script directory has multiple bases, an error is raised; - :meth:`.ScriptDirectory.get_bases` should be - preferred. - - """ - bases = self.get_bases() - if len(bases) > 1: - raise util.CommandError( - "The script directory has multiple bases. " - "Please use get_bases()." - ) - elif bases: - return bases[0] - else: - return None - - def get_bases(self) -> List[str]: - """return all "base" revisions as strings. - - This is the revision number of all scripts that - have a ``down_revision`` of None. 
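A small sketch of the single-head versus branched cases described above, again against the hypothetical ``script`` object::

    heads = script.get_heads()
    if len(heads) == 1:
        current = script.get_current_head()
    else:
        # branched history: get_current_head() would raise CommandError
        print("multiple heads:", heads)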
- - """ - return list(self.revision_map.bases) - - def _upgrade_revs( - self, destination: str, current_rev: str - ) -> List[RevisionStep]: - with self._catch_revision_errors( - ancestor="Destination %(end)s is not a valid upgrade " - "target from current head(s)", - end=destination, - ): - revs = self.iterate_revisions( - destination, current_rev, implicit_base=True - ) - return [ - migration.MigrationStep.upgrade_from_script( - self.revision_map, script - ) - for script in reversed(list(revs)) - ] - - def _downgrade_revs( - self, destination: str, current_rev: Optional[str] - ) -> List[RevisionStep]: - with self._catch_revision_errors( - ancestor="Destination %(end)s is not a valid downgrade " - "target from current head(s)", - end=destination, - ): - revs = self.iterate_revisions( - current_rev, destination, select_for_downgrade=True - ) - return [ - migration.MigrationStep.downgrade_from_script( - self.revision_map, script - ) - for script in revs - ] - - def _stamp_revs( - self, revision: _RevIdType, heads: _RevIdType - ) -> List[StampStep]: - with self._catch_revision_errors( - multiple_heads="Multiple heads are present; please specify a " - "single target revision" - ): - heads_revs = self.get_revisions(heads) - - steps = [] - - if not revision: - revision = "base" - - filtered_heads: List[Script] = [] - for rev in util.to_tuple(revision): - if rev: - filtered_heads.extend( - self.revision_map.filter_for_lineage( - cast(Sequence[Script], heads_revs), - rev, - include_dependencies=True, - ) - ) - filtered_heads = util.unique_list(filtered_heads) - - dests = self.get_revisions(revision) or [None] - - for dest in dests: - if dest is None: - # dest is 'base'. Return a "delete branch" migration - # for all applicable heads. - steps.extend( - [ - migration.StampStep( - head.revision, - None, - False, - True, - self.revision_map, - ) - for head in filtered_heads - ] - ) - continue - elif dest in filtered_heads: - # the dest is already in the version table, do nothing. - continue - - # figure out if the dest is a descendant or an - # ancestor of the selected nodes - descendants = set( - self.revision_map._get_descendant_nodes([dest]) - ) - ancestors = set(self.revision_map._get_ancestor_nodes([dest])) - - if descendants.intersection(filtered_heads): - # heads are above the target, so this is a downgrade. - # we can treat them as a "merge", single step. - assert not ancestors.intersection(filtered_heads) - todo_heads = [head.revision for head in filtered_heads] - step = migration.StampStep( - todo_heads, - dest.revision, - False, - False, - self.revision_map, - ) - steps.append(step) - continue - elif ancestors.intersection(filtered_heads): - # heads are below the target, so this is an upgrade. - # we can treat them as a "merge", single step. - todo_heads = [head.revision for head in filtered_heads] - step = migration.StampStep( - todo_heads, - dest.revision, - True, - False, - self.revision_map, - ) - steps.append(step) - continue - else: - # destination is in a branch not represented, - # treat it as new branch - step = migration.StampStep( - (), dest.revision, True, True, self.revision_map - ) - steps.append(step) - continue - - return steps - - def run_env(self) -> None: - """Run the script environment. - - This basically runs the ``env.py`` script present - in the migration environment. It is called exclusively - by the command functions in :mod:`alembic.command`. 
- - - """ - util.load_python_file(self.dir, "env.py") - - @property - def env_py_location(self) -> str: - return os.path.abspath(os.path.join(self.dir, "env.py")) - - def _generate_template(self, src: str, dest: str, **kw: Any) -> None: - with util.status( - f"Generating {os.path.abspath(dest)}", **self.messaging_opts - ): - util.template_to_file(src, dest, self.output_encoding, **kw) - - def _copy_file(self, src: str, dest: str) -> None: - with util.status( - f"Generating {os.path.abspath(dest)}", **self.messaging_opts - ): - shutil.copy(src, dest) - - def _ensure_directory(self, path: str) -> None: - path = os.path.abspath(path) - if not os.path.exists(path): - with util.status( - f"Creating directory {path}", **self.messaging_opts - ): - os.makedirs(path) - - def _generate_create_date(self) -> datetime.datetime: - if self.timezone is not None: - if ZoneInfo is None: - raise util.CommandError( - "Python >= 3.9 is required for timezone support or " - "the 'backports.zoneinfo' package must be installed." - ) - # First, assume correct capitalization - try: - tzinfo = ZoneInfo(self.timezone) - except ZoneInfoNotFoundError: - tzinfo = None - if tzinfo is None: - try: - tzinfo = ZoneInfo(self.timezone.upper()) - except ZoneInfoNotFoundError: - raise util.CommandError( - "Can't locate timezone: %s" % self.timezone - ) from None - create_date = ( - datetime.datetime.utcnow() - .replace(tzinfo=datetime.timezone.utc) - .astimezone(tzinfo) - ) - else: - create_date = datetime.datetime.now() - return create_date - - def generate_revision( - self, - revid: str, - message: Optional[str], - head: Optional[_RevIdType] = None, - splice: Optional[bool] = False, - branch_labels: Optional[_RevIdType] = None, - version_path: Optional[str] = None, - depends_on: Optional[_RevIdType] = None, - **kw: Any, - ) -> Optional[Script]: - """Generate a new revision file. - - This runs the ``script.py.mako`` template, given - template arguments, and creates a new file. - - :param revid: String revision id. Typically this - comes from ``alembic.util.rev_id()``. - :param message: the revision message, the one passed - by the -m argument to the ``revision`` command. - :param head: the head revision to generate against. Defaults - to the current "head" if no branches are present, else raises - an exception. - :param splice: if True, allow the "head" version to not be an - actual head; otherwise, the selected head must be a head - (e.g. endpoint) revision. - - """ - if head is None: - head = "head" - - try: - Script.verify_rev_id(revid) - except revision.RevisionError as err: - raise util.CommandError(err.args[0]) from err - - with self._catch_revision_errors( - multiple_heads=( - "Multiple heads are present; please specify the head " - "revision on which the new revision should be based, " - "or perform a merge." 
- ) - ): - heads = cast( - Tuple[Optional["Revision"], ...], - self.revision_map.get_revisions(head), - ) - for h in heads: - assert h != "base" # type: ignore[comparison-overlap] - - if len(set(heads)) != len(heads): - raise util.CommandError("Duplicate head revisions specified") - - create_date = self._generate_create_date() - - if version_path is None: - if len(self._version_locations) > 1: - for head_ in heads: - if head_ is not None: - assert isinstance(head_, Script) - version_path = os.path.dirname(head_.path) - break - else: - raise util.CommandError( - "Multiple version locations present, " - "please specify --version-path" - ) - else: - version_path = self.versions - - norm_path = os.path.normpath(os.path.abspath(version_path)) - for vers_path in self._version_locations: - if os.path.normpath(vers_path) == norm_path: - break - else: - raise util.CommandError( - "Path %s is not represented in current " - "version locations" % version_path - ) - - if self.version_locations: - self._ensure_directory(version_path) - - path = self._rev_path(version_path, revid, message, create_date) - - if not splice: - for head_ in heads: - if head_ is not None and not head_.is_head: - raise util.CommandError( - "Revision %s is not a head revision; please specify " - "--splice to create a new branch from this revision" - % head_.revision - ) - - resolved_depends_on: Optional[List[str]] - if depends_on: - with self._catch_revision_errors(): - resolved_depends_on = [ - ( - dep - if dep in rev.branch_labels # maintain branch labels - else rev.revision - ) # resolve partial revision identifiers - for rev, dep in [ - (not_none(self.revision_map.get_revision(dep)), dep) - for dep in util.to_list(depends_on) - ] - ] - else: - resolved_depends_on = None - - self._generate_template( - os.path.join(self.dir, "script.py.mako"), - path, - up_revision=str(revid), - down_revision=revision.tuple_rev_as_scalar( - tuple(h.revision if h is not None else None for h in heads) - ), - branch_labels=util.to_tuple(branch_labels), - depends_on=revision.tuple_rev_as_scalar(resolved_depends_on), - create_date=create_date, - comma=util.format_as_comma, - message=message if message is not None else ("empty message"), - **kw, - ) - - post_write_hooks = self.hook_config - if post_write_hooks: - write_hooks._run_hooks(path, post_write_hooks) - - try: - script = Script._from_path(self, path) - except revision.RevisionError as err: - raise util.CommandError(err.args[0]) from err - if script is None: - return None - if branch_labels and not script.branch_labels: - raise util.CommandError( - "Version %s specified branch_labels %s, however the " - "migration file %s does not have them; have you upgraded " - "your script.py.mako to include the " - "'branch_labels' section?" 
- % (script.revision, branch_labels, script.path) - ) - self.revision_map.add_revision(script) - return script - - def _rev_path( - self, - path: str, - rev_id: str, - message: Optional[str], - create_date: datetime.datetime, - ) -> str: - epoch = int(create_date.timestamp()) - slug = "_".join(_slug_re.findall(message or "")).lower() - if len(slug) > self.truncate_slug_length: - slug = slug[: self.truncate_slug_length].rsplit("_", 1)[0] + "_" - filename = "%s.py" % ( - self.file_template - % { - "rev": rev_id, - "slug": slug, - "epoch": epoch, - "year": create_date.year, - "month": create_date.month, - "day": create_date.day, - "hour": create_date.hour, - "minute": create_date.minute, - "second": create_date.second, - } - ) - return os.path.join(path, filename) - - -class Script(revision.Revision): - """Represent a single revision file in a ``versions/`` directory. - - The :class:`.Script` instance is returned by methods - such as :meth:`.ScriptDirectory.iterate_revisions`. - - """ - - def __init__(self, module: ModuleType, rev_id: str, path: str): - self.module = module - self.path = path - super().__init__( - rev_id, - module.down_revision, - branch_labels=util.to_tuple( - getattr(module, "branch_labels", None), default=() - ), - dependencies=util.to_tuple( - getattr(module, "depends_on", None), default=() - ), - ) - - module: ModuleType - """The Python module representing the actual script itself.""" - - path: str - """Filesystem path of the script.""" - - _db_current_indicator: Optional[bool] = None - """Utility variable which when set will cause string output to indicate - this is a "current" version in some database""" - - @property - def doc(self) -> str: - """Return the docstring given in the script.""" - - return re.split("\n\n", self.longdoc)[0] - - @property - def longdoc(self) -> str: - """Return the docstring given in the script.""" - - doc = self.module.__doc__ - if doc: - if hasattr(self.module, "_alembic_source_encoding"): - doc = doc.decode( # type: ignore[attr-defined] - self.module._alembic_source_encoding - ) - return doc.strip() # type: ignore[union-attr] - else: - return "" - - @property - def log_entry(self) -> str: - entry = "Rev: %s%s%s%s%s\n" % ( - self.revision, - " (head)" if self.is_head else "", - " (branchpoint)" if self.is_branch_point else "", - " (mergepoint)" if self.is_merge_point else "", - " (current)" if self._db_current_indicator else "", - ) - if self.is_merge_point: - entry += "Merges: %s\n" % (self._format_down_revision(),) - else: - entry += "Parent: %s\n" % (self._format_down_revision(),) - - if self.dependencies: - entry += "Also depends on: %s\n" % ( - util.format_as_comma(self.dependencies) - ) - - if self.is_branch_point: - entry += "Branches into: %s\n" % ( - util.format_as_comma(self.nextrev) - ) - - if self.branch_labels: - entry += "Branch names: %s\n" % ( - util.format_as_comma(self.branch_labels), - ) - - entry += "Path: %s\n" % (self.path,) - - entry += "\n%s\n" % ( - "\n".join(" %s" % para for para in self.longdoc.splitlines()) - ) - return entry - - def __str__(self) -> str: - return "%s -> %s%s%s%s, %s" % ( - self._format_down_revision(), - self.revision, - " (head)" if self.is_head else "", - " (branchpoint)" if self.is_branch_point else "", - " (mergepoint)" if self.is_merge_point else "", - self.doc, - ) - - def _head_only( - self, - include_branches: bool = False, - include_doc: bool = False, - include_parents: bool = False, - tree_indicators: bool = True, - head_indicators: bool = True, - ) -> str: - text = self.revision - if 
include_parents: - if self.dependencies: - text = "%s (%s) -> %s" % ( - self._format_down_revision(), - util.format_as_comma(self.dependencies), - text, - ) - else: - text = "%s -> %s" % (self._format_down_revision(), text) - assert text is not None - if include_branches and self.branch_labels: - text += " (%s)" % util.format_as_comma(self.branch_labels) - if head_indicators or tree_indicators: - text += "%s%s%s" % ( - " (head)" if self._is_real_head else "", - ( - " (effective head)" - if self.is_head and not self._is_real_head - else "" - ), - " (current)" if self._db_current_indicator else "", - ) - if tree_indicators: - text += "%s%s" % ( - " (branchpoint)" if self.is_branch_point else "", - " (mergepoint)" if self.is_merge_point else "", - ) - if include_doc: - text += ", %s" % self.doc - return text - - def cmd_format( - self, - verbose: bool, - include_branches: bool = False, - include_doc: bool = False, - include_parents: bool = False, - tree_indicators: bool = True, - ) -> str: - if verbose: - return self.log_entry - else: - return self._head_only( - include_branches, include_doc, include_parents, tree_indicators - ) - - def _format_down_revision(self) -> str: - if not self.down_revision: - return "" - else: - return util.format_as_comma(self._versioned_down_revisions) - - @classmethod - def _from_path( - cls, scriptdir: ScriptDirectory, path: str - ) -> Optional[Script]: - dir_, filename = os.path.split(path) - return cls._from_filename(scriptdir, dir_, filename) - - @classmethod - def _list_py_dir(cls, scriptdir: ScriptDirectory, path: str) -> List[str]: - paths = [] - for root, dirs, files in os.walk(path, topdown=True): - if root.endswith("__pycache__"): - # a special case - we may include these files - # if a `sourceless` option is specified - continue - - for filename in sorted(files): - paths.append(os.path.join(root, filename)) - - if scriptdir.sourceless: - # look for __pycache__ - py_cache_path = os.path.join(root, "__pycache__") - if os.path.exists(py_cache_path): - # add all files from __pycache__ whose filename is not - # already in the names we got from the version directory. - # add as relative paths including __pycache__ token - names = {filename.split(".")[0] for filename in files} - paths.extend( - os.path.join(py_cache_path, pyc) - for pyc in os.listdir(py_cache_path) - if pyc.split(".")[0] not in names - ) - - if not scriptdir.recursive_version_locations: - break - - # the real script order is defined by revision, - # but it may be undefined if there are many files with a same - # `down_revision`, for a better user experience (ex. 
debugging), - # we use a deterministic order - dirs.sort() - - return paths - - @classmethod - def _from_filename( - cls, scriptdir: ScriptDirectory, dir_: str, filename: str - ) -> Optional[Script]: - if scriptdir.sourceless: - py_match = _sourceless_rev_file.match(filename) - else: - py_match = _only_source_rev_file.match(filename) - - if not py_match: - return None - - py_filename = py_match.group(1) - - if scriptdir.sourceless: - is_c = py_match.group(2) == "c" - is_o = py_match.group(2) == "o" - else: - is_c = is_o = False - - if is_o or is_c: - py_exists = os.path.exists(os.path.join(dir_, py_filename)) - pyc_exists = os.path.exists(os.path.join(dir_, py_filename + "c")) - - # prefer .py over .pyc because we'd like to get the - # source encoding; prefer .pyc over .pyo because we'd like to - # have the docstrings which a -OO file would not have - if py_exists or is_o and pyc_exists: - return None - - module = util.load_python_file(dir_, filename) - - if not hasattr(module, "revision"): - # attempt to get the revision id from the script name, - # this for legacy only - m = _legacy_rev.match(filename) - if not m: - raise util.CommandError( - "Could not determine revision id from filename %s. " - "Be sure the 'revision' variable is " - "declared inside the script (please see 'Upgrading " - "from Alembic 0.1 to 0.2' in the documentation)." - % filename - ) - else: - revision = m.group(1) - else: - revision = module.revision - return Script(module, revision, os.path.join(dir_, filename)) diff --git a/myenv/Lib/site-packages/alembic/script/revision.py b/myenv/Lib/site-packages/alembic/script/revision.py deleted file mode 100644 index c3108e9..0000000 --- a/myenv/Lib/site-packages/alembic/script/revision.py +++ /dev/null @@ -1,1728 +0,0 @@ -from __future__ import annotations - -import collections -import re -from typing import Any -from typing import Callable -from typing import cast -from typing import Collection -from typing import Deque -from typing import Dict -from typing import FrozenSet -from typing import Iterable -from typing import Iterator -from typing import List -from typing import Optional -from typing import overload -from typing import Protocol -from typing import Sequence -from typing import Set -from typing import Tuple -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -from sqlalchemy import util as sqlautil - -from .. import util -from ..util import not_none - -if TYPE_CHECKING: - from typing import Literal - -_RevIdType = Union[str, List[str], Tuple[str, ...]] -_GetRevArg = Union[ - str, - Iterable[Optional[str]], - Iterable[str], -] -_RevisionIdentifierType = Union[str, Tuple[str, ...], None] -_RevisionOrStr = Union["Revision", str] -_RevisionOrBase = Union["Revision", "Literal['base']"] -_InterimRevisionMapType = Dict[str, "Revision"] -_RevisionMapType = Dict[Union[None, str, Tuple[()]], Optional["Revision"]] -_T = TypeVar("_T") -_TR = TypeVar("_TR", bound=Optional[_RevisionOrStr]) - -_relative_destination = re.compile(r"(?:(.+?)@)?(\w+)?((?:\+|-)\d+)") -_revision_illegal_chars = ["@", "-", "+"] - - -class _CollectRevisionsProtocol(Protocol): - def __call__( - self, - upper: _RevisionIdentifierType, - lower: _RevisionIdentifierType, - inclusive: bool, - implicit_base: bool, - assert_relative_length: bool, - ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]: ... 
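The ``_relative_destination`` pattern above is what gives Alembic its ``branch@revision+n`` / ``-n`` command syntax; copied verbatim, its groups split out as follows::

    import re

    _relative_destination = re.compile(r"(?:(.+?)@)?(\w+)?((?:\+|-)\d+)")

    print(_relative_destination.match("mybranch@head-2").groups())
    # ('mybranch', 'head', '-2')
    print(_relative_destination.match("+1").groups())
    # (None, None, '+1')  -- a bare relative step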
- - -class RevisionError(Exception): - pass - - -class RangeNotAncestorError(RevisionError): - def __init__( - self, lower: _RevisionIdentifierType, upper: _RevisionIdentifierType - ) -> None: - self.lower = lower - self.upper = upper - super().__init__( - "Revision %s is not an ancestor of revision %s" - % (lower or "base", upper or "base") - ) - - -class MultipleHeads(RevisionError): - def __init__(self, heads: Sequence[str], argument: Optional[str]) -> None: - self.heads = heads - self.argument = argument - super().__init__( - "Multiple heads are present for given argument '%s'; " - "%s" % (argument, ", ".join(heads)) - ) - - -class ResolutionError(RevisionError): - def __init__(self, message: str, argument: str) -> None: - super().__init__(message) - self.argument = argument - - -class CycleDetected(RevisionError): - kind = "Cycle" - - def __init__(self, revisions: Sequence[str]) -> None: - self.revisions = revisions - super().__init__( - "%s is detected in revisions (%s)" - % (self.kind, ", ".join(revisions)) - ) - - -class DependencyCycleDetected(CycleDetected): - kind = "Dependency cycle" - - def __init__(self, revisions: Sequence[str]) -> None: - super().__init__(revisions) - - -class LoopDetected(CycleDetected): - kind = "Self-loop" - - def __init__(self, revision: str) -> None: - super().__init__([revision]) - - -class DependencyLoopDetected(DependencyCycleDetected, LoopDetected): - kind = "Dependency self-loop" - - def __init__(self, revision: Sequence[str]) -> None: - super().__init__(revision) - - -class RevisionMap: - """Maintains a map of :class:`.Revision` objects. - - :class:`.RevisionMap` is used by :class:`.ScriptDirectory` to maintain - and traverse the collection of :class:`.Script` objects, which are - themselves instances of :class:`.Revision`. - - """ - - def __init__(self, generator: Callable[[], Iterable[Revision]]) -> None: - """Construct a new :class:`.RevisionMap`. - - :param generator: a zero-arg callable that will generate an iterable - of :class:`.Revision` instances to be used. These are typically - :class:`.Script` subclasses within regular Alembic use. - - """ - self._generator = generator - - @util.memoized_property - def heads(self) -> Tuple[str, ...]: - """All "head" revisions as strings. - - This is normally a tuple of length one, - unless unmerged branches are present. - - :return: a tuple of string revision numbers. - - """ - self._revision_map - return self.heads - - @util.memoized_property - def bases(self) -> Tuple[str, ...]: - """All "base" revisions as strings. - - These are revisions that have a ``down_revision`` of None, - or empty tuple. - - :return: a tuple of string revision numbers. - - """ - self._revision_map - return self.bases - - @util.memoized_property - def _real_heads(self) -> Tuple[str, ...]: - """All "real" head revisions as strings. - - :return: a tuple of string revision numbers. - - """ - self._revision_map - return self._real_heads - - @util.memoized_property - def _real_bases(self) -> Tuple[str, ...]: - """All "real" base revisions as strings. - - :return: a tuple of string revision numbers. - - """ - self._revision_map - return self._real_bases - - @util.memoized_property - def _revision_map(self) -> _RevisionMapType: - """memoized attribute, initializes the revision map from the - initial collection. 
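The ``heads`` / ``bases`` / ``_real_heads`` / ``_real_bases`` properties above all rely on the same trick: evaluating ``self._revision_map`` builds the map, which assigns plain tuples over those attribute names, so the trailing ``return self.heads`` picks up the computed value. This works only because the memoizing decorator is a non-data descriptor; a minimal stand-in (a sketch, not ``util.memoized_property`` itself)::

    class memoized_property:
        # non-data descriptor: once an instance attribute of the same
        # name exists, ordinary lookup prefers it over this descriptor
        def __init__(self, fget):
            self.fget = fget
            self.__name__ = fget.__name__

        def __get__(self, obj, cls=None):
            if obj is None:
                return self
            obj.__dict__[self.__name__] = value = self.fget(obj)
            return value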
- - """ - # Ordering required for some tests to pass (but not required in - # general) - map_: _InterimRevisionMapType = sqlautil.OrderedDict() - - heads: Set[Revision] = sqlautil.OrderedSet() - _real_heads: Set[Revision] = sqlautil.OrderedSet() - bases: Tuple[Revision, ...] = () - _real_bases: Tuple[Revision, ...] = () - - has_branch_labels = set() - all_revisions = set() - - for revision in self._generator(): - all_revisions.add(revision) - - if revision.revision in map_: - util.warn( - "Revision %s is present more than once" % revision.revision - ) - map_[revision.revision] = revision - if revision.branch_labels: - has_branch_labels.add(revision) - - heads.add(revision) - _real_heads.add(revision) - if revision.is_base: - bases += (revision,) - if revision._is_real_base: - _real_bases += (revision,) - - # add the branch_labels to the map_. We'll need these - # to resolve the dependencies. - rev_map = map_.copy() - self._map_branch_labels( - has_branch_labels, cast(_RevisionMapType, map_) - ) - - # resolve dependency names from branch labels and symbolic - # names - self._add_depends_on(all_revisions, cast(_RevisionMapType, map_)) - - for rev in map_.values(): - for downrev in rev._all_down_revisions: - if downrev not in map_: - util.warn( - "Revision %s referenced from %s is not present" - % (downrev, rev) - ) - down_revision = map_[downrev] - down_revision.add_nextrev(rev) - if downrev in rev._versioned_down_revisions: - heads.discard(down_revision) - _real_heads.discard(down_revision) - - # once the map has downrevisions populated, the dependencies - # can be further refined to include only those which are not - # already ancestors - self._normalize_depends_on(all_revisions, cast(_RevisionMapType, map_)) - self._detect_cycles(rev_map, heads, bases, _real_heads, _real_bases) - - revision_map: _RevisionMapType = dict(map_.items()) - revision_map[None] = revision_map[()] = None - self.heads = tuple(rev.revision for rev in heads) - self._real_heads = tuple(rev.revision for rev in _real_heads) - self.bases = tuple(rev.revision for rev in bases) - self._real_bases = tuple(rev.revision for rev in _real_bases) - - self._add_branches(has_branch_labels, revision_map) - return revision_map - - def _detect_cycles( - self, - rev_map: _InterimRevisionMapType, - heads: Set[Revision], - bases: Tuple[Revision, ...], - _real_heads: Set[Revision], - _real_bases: Tuple[Revision, ...], - ) -> None: - if not rev_map: - return - if not heads or not bases: - raise CycleDetected(list(rev_map)) - total_space = { - rev.revision - for rev in self._iterate_related_revisions( - lambda r: r._versioned_down_revisions, - heads, - map_=cast(_RevisionMapType, rev_map), - ) - }.intersection( - rev.revision - for rev in self._iterate_related_revisions( - lambda r: r.nextrev, - bases, - map_=cast(_RevisionMapType, rev_map), - ) - ) - deleted_revs = set(rev_map.keys()) - total_space - if deleted_revs: - raise CycleDetected(sorted(deleted_revs)) - - if not _real_heads or not _real_bases: - raise DependencyCycleDetected(list(rev_map)) - total_space = { - rev.revision - for rev in self._iterate_related_revisions( - lambda r: r._all_down_revisions, - _real_heads, - map_=cast(_RevisionMapType, rev_map), - ) - }.intersection( - rev.revision - for rev in self._iterate_related_revisions( - lambda r: r._all_nextrev, - _real_bases, - map_=cast(_RevisionMapType, rev_map), - ) - ) - deleted_revs = set(rev_map.keys()) - total_space - if deleted_revs: - raise DependencyCycleDetected(sorted(deleted_revs)) - - def _map_branch_labels( - 
self, revisions: Collection[Revision], map_: _RevisionMapType - ) -> None: - for revision in revisions: - if revision.branch_labels: - assert revision._orig_branch_labels is not None - for branch_label in revision._orig_branch_labels: - if branch_label in map_: - map_rev = map_[branch_label] - assert map_rev is not None - raise RevisionError( - "Branch name '%s' in revision %s already " - "used by revision %s" - % ( - branch_label, - revision.revision, - map_rev.revision, - ) - ) - map_[branch_label] = revision - - def _add_branches( - self, revisions: Collection[Revision], map_: _RevisionMapType - ) -> None: - for revision in revisions: - if revision.branch_labels: - revision.branch_labels.update(revision.branch_labels) - for node in self._get_descendant_nodes( - [revision], map_, include_dependencies=False - ): - node.branch_labels.update(revision.branch_labels) - - parent = node - while ( - parent - and not parent._is_real_branch_point - and not parent.is_merge_point - ): - parent.branch_labels.update(revision.branch_labels) - if parent.down_revision: - parent = map_[parent.down_revision] - else: - break - - def _add_depends_on( - self, revisions: Collection[Revision], map_: _RevisionMapType - ) -> None: - """Resolve the 'dependencies' for each revision in a collection - in terms of actual revision ids, as opposed to branch labels or other - symbolic names. - - The collection is then assigned to the _resolved_dependencies - attribute on each revision object. - - """ - - for revision in revisions: - if revision.dependencies: - deps = [ - map_[dep] for dep in util.to_tuple(revision.dependencies) - ] - revision._resolved_dependencies = tuple( - [d.revision for d in deps if d is not None] - ) - else: - revision._resolved_dependencies = () - - def _normalize_depends_on( - self, revisions: Collection[Revision], map_: _RevisionMapType - ) -> None: - """Create a collection of "dependencies" that omits dependencies - that are already ancestor nodes for each revision in a given - collection. - - This builds upon the _resolved_dependencies collection created in the - _add_depends_on() method, looking in the fully populated revision map - for ancestors, and omitting them as the _resolved_dependencies - collection as it is copied to a new collection. The new collection is - then assigned to the _normalized_resolved_dependencies attribute on - each revision object. - - The collection is then used to determine the immediate "down revision" - identifiers for this revision. - - """ - - for revision in revisions: - if revision._resolved_dependencies: - normalized_resolved = set(revision._resolved_dependencies) - for rev in self._get_ancestor_nodes( - [revision], - include_dependencies=False, - map_=map_, - ): - if rev is revision: - continue - elif rev._resolved_dependencies: - normalized_resolved.difference_update( - rev._resolved_dependencies - ) - - revision._normalized_resolved_dependencies = tuple( - normalized_resolved - ) - else: - revision._normalized_resolved_dependencies = () - - def add_revision(self, revision: Revision, _replace: bool = False) -> None: - """add a single revision to an existing map. - - This method is for single-revision use cases, it's not - appropriate for fully populating an entire revision map. 
- - """ - map_ = self._revision_map - if not _replace and revision.revision in map_: - util.warn( - "Revision %s is present more than once" % revision.revision - ) - elif _replace and revision.revision not in map_: - raise Exception("revision %s not in map" % revision.revision) - - map_[revision.revision] = revision - - revisions = [revision] - self._add_branches(revisions, map_) - self._map_branch_labels(revisions, map_) - self._add_depends_on(revisions, map_) - - if revision.is_base: - self.bases += (revision.revision,) - if revision._is_real_base: - self._real_bases += (revision.revision,) - - for downrev in revision._all_down_revisions: - if downrev not in map_: - util.warn( - "Revision %s referenced from %s is not present" - % (downrev, revision) - ) - not_none(map_[downrev]).add_nextrev(revision) - - self._normalize_depends_on(revisions, map_) - - if revision._is_real_head: - self._real_heads = tuple( - head - for head in self._real_heads - if head - not in set(revision._all_down_revisions).union( - [revision.revision] - ) - ) + (revision.revision,) - if revision.is_head: - self.heads = tuple( - head - for head in self.heads - if head - not in set(revision._versioned_down_revisions).union( - [revision.revision] - ) - ) + (revision.revision,) - - def get_current_head( - self, branch_label: Optional[str] = None - ) -> Optional[str]: - """Return the current head revision. - - If the script directory has multiple heads - due to branching, an error is raised; - :meth:`.ScriptDirectory.get_heads` should be - preferred. - - :param branch_label: optional branch name which will limit the - heads considered to those which include that branch_label. - - :return: a string revision number. - - .. seealso:: - - :meth:`.ScriptDirectory.get_heads` - - """ - current_heads: Sequence[str] = self.heads - if branch_label: - current_heads = self.filter_for_lineage( - current_heads, branch_label - ) - if len(current_heads) > 1: - raise MultipleHeads( - current_heads, - "%s@head" % branch_label if branch_label else "head", - ) - - if current_heads: - return current_heads[0] - else: - return None - - def _get_base_revisions(self, identifier: str) -> Tuple[str, ...]: - return self.filter_for_lineage(self.bases, identifier) - - def get_revisions( - self, id_: Optional[_GetRevArg] - ) -> Tuple[Optional[_RevisionOrBase], ...]: - """Return the :class:`.Revision` instances with the given rev id - or identifiers. - - May be given a single identifier, a sequence of identifiers, or the - special symbols "head" or "base". The result is a tuple of one - or more identifiers, or an empty tuple in the case of "base". - - In the cases where 'head', 'heads' is requested and the - revision map is empty, returns an empty tuple. - - Supports partial identifiers, where the given identifier - is matched against all identifiers that start with the given - characters; if there is exactly one match, that determines the - full revision. 
- - """ - - if isinstance(id_, (list, tuple, set, frozenset)): - return sum([self.get_revisions(id_elem) for id_elem in id_], ()) - else: - resolved_id, branch_label = self._resolve_revision_number(id_) - if len(resolved_id) == 1: - try: - rint = int(resolved_id[0]) - if rint < 0: - # branch@-n -> walk down from heads - select_heads = self.get_revisions("heads") - if branch_label is not None: - select_heads = tuple( - head - for head in select_heads - if branch_label - in is_revision(head).branch_labels - ) - return tuple( - self._walk(head, steps=rint) - for head in select_heads - ) - except ValueError: - # couldn't resolve as integer - pass - return tuple( - self._revision_for_ident(rev_id, branch_label) - for rev_id in resolved_id - ) - - def get_revision(self, id_: Optional[str]) -> Optional[Revision]: - """Return the :class:`.Revision` instance with the given rev id. - - If a symbolic name such as "head" or "base" is given, resolves - the identifier into the current head or base revision. If the symbolic - name refers to multiples, :class:`.MultipleHeads` is raised. - - Supports partial identifiers, where the given identifier - is matched against all identifiers that start with the given - characters; if there is exactly one match, that determines the - full revision. - - """ - - resolved_id, branch_label = self._resolve_revision_number(id_) - if len(resolved_id) > 1: - raise MultipleHeads(resolved_id, id_) - - resolved: Union[str, Tuple[()]] = resolved_id[0] if resolved_id else () - return self._revision_for_ident(resolved, branch_label) - - def _resolve_branch(self, branch_label: str) -> Optional[Revision]: - try: - branch_rev = self._revision_map[branch_label] - except KeyError: - try: - nonbranch_rev = self._revision_for_ident(branch_label) - except ResolutionError as re: - raise ResolutionError( - "No such branch: '%s'" % branch_label, branch_label - ) from re - - else: - return nonbranch_rev - else: - return branch_rev - - def _revision_for_ident( - self, - resolved_id: Union[str, Tuple[()], None], - check_branch: Optional[str] = None, - ) -> Optional[Revision]: - branch_rev: Optional[Revision] - if check_branch: - branch_rev = self._resolve_branch(check_branch) - else: - branch_rev = None - - revision: Union[Optional[Revision], Literal[False]] - try: - revision = self._revision_map[resolved_id] - except KeyError: - # break out to avoid misleading py3k stack traces - revision = False - revs: Sequence[str] - if revision is False: - assert resolved_id - # do a partial lookup - revs = [ - x - for x in self._revision_map - if x and len(x) > 3 and x.startswith(resolved_id) - ] - - if branch_rev: - revs = self.filter_for_lineage(revs, check_branch) - if not revs: - raise ResolutionError( - "No such revision or branch '%s'%s" - % ( - resolved_id, - ( - "; please ensure at least four characters are " - "present for partial revision identifier matches" - if len(resolved_id) < 4 - else "" - ), - ), - resolved_id, - ) - elif len(revs) > 1: - raise ResolutionError( - "Multiple revisions start " - "with '%s': %s..." 
- % (resolved_id, ", ".join("'%s'" % r for r in revs[0:3])), - resolved_id, - ) - else: - revision = self._revision_map[revs[0]] - - if check_branch and revision is not None: - assert branch_rev is not None - assert resolved_id - if not self._shares_lineage( - revision.revision, branch_rev.revision - ): - raise ResolutionError( - "Revision %s is not a member of branch '%s'" - % (revision.revision, check_branch), - resolved_id, - ) - return revision - - def _filter_into_branch_heads( - self, targets: Iterable[Optional[_RevisionOrBase]] - ) -> Set[Optional[_RevisionOrBase]]: - targets = set(targets) - - for rev in list(targets): - assert rev - if targets.intersection( - self._get_descendant_nodes([rev], include_dependencies=False) - ).difference([rev]): - targets.discard(rev) - return targets - - def filter_for_lineage( - self, - targets: Iterable[_TR], - check_against: Optional[str], - include_dependencies: bool = False, - ) -> Tuple[_TR, ...]: - id_, branch_label = self._resolve_revision_number(check_against) - - shares = [] - if branch_label: - shares.append(branch_label) - if id_: - shares.extend(id_) - - return tuple( - tg - for tg in targets - if self._shares_lineage( - tg, shares, include_dependencies=include_dependencies - ) - ) - - def _shares_lineage( - self, - target: Optional[_RevisionOrStr], - test_against_revs: Sequence[_RevisionOrStr], - include_dependencies: bool = False, - ) -> bool: - if not test_against_revs: - return True - if not isinstance(target, Revision): - resolved_target = not_none(self._revision_for_ident(target)) - else: - resolved_target = target - - resolved_test_against_revs = [ - ( - self._revision_for_ident(test_against_rev) - if not isinstance(test_against_rev, Revision) - else test_against_rev - ) - for test_against_rev in util.to_tuple( - test_against_revs, default=() - ) - ] - - return bool( - set( - self._get_descendant_nodes( - [resolved_target], - include_dependencies=include_dependencies, - ) - ) - .union( - self._get_ancestor_nodes( - [resolved_target], - include_dependencies=include_dependencies, - ) - ) - .intersection(resolved_test_against_revs) - ) - - def _resolve_revision_number( - self, id_: Optional[_GetRevArg] - ) -> Tuple[Tuple[str, ...], Optional[str]]: - branch_label: Optional[str] - if isinstance(id_, str) and "@" in id_: - branch_label, id_ = id_.split("@", 1) - - elif id_ is not None and ( - (isinstance(id_, tuple) and id_ and not isinstance(id_[0], str)) - or not isinstance(id_, (str, tuple)) - ): - raise RevisionError( - "revision identifier %r is not a string; ensure database " - "driver settings are correct" % (id_,) - ) - - else: - branch_label = None - - # ensure map is loaded - self._revision_map - if id_ == "heads": - if branch_label: - return ( - self.filter_for_lineage(self.heads, branch_label), - branch_label, - ) - else: - return self._real_heads, branch_label - elif id_ == "head": - current_head = self.get_current_head(branch_label) - if current_head: - return (current_head,), branch_label - else: - return (), branch_label - elif id_ == "base" or id_ is None: - return (), branch_label - else: - return util.to_tuple(id_, default=None), branch_label - - def iterate_revisions( - self, - upper: _RevisionIdentifierType, - lower: _RevisionIdentifierType, - implicit_base: bool = False, - inclusive: bool = False, - assert_relative_length: bool = True, - select_for_downgrade: bool = False, - ) -> Iterator[Revision]: - """Iterate through script revisions, starting at the given - upper revision identifier and ending at the lower. 
- - The traversal uses strictly the `down_revision` - marker inside each migration script, so - it is a requirement that upper >= lower, - else you'll get nothing back. - - The iterator yields :class:`.Revision` objects. - - """ - fn: _CollectRevisionsProtocol - if select_for_downgrade: - fn = self._collect_downgrade_revisions - else: - fn = self._collect_upgrade_revisions - - revisions, heads = fn( - upper, - lower, - inclusive=inclusive, - implicit_base=implicit_base, - assert_relative_length=assert_relative_length, - ) - - for node in self._topological_sort(revisions, heads): - yield not_none(self.get_revision(node)) - - def _get_descendant_nodes( - self, - targets: Collection[Optional[_RevisionOrBase]], - map_: Optional[_RevisionMapType] = None, - check: bool = False, - omit_immediate_dependencies: bool = False, - include_dependencies: bool = True, - ) -> Iterator[Any]: - if omit_immediate_dependencies: - - def fn(rev: Revision) -> Iterable[str]: - if rev not in targets: - return rev._all_nextrev - else: - return rev.nextrev - - elif include_dependencies: - - def fn(rev: Revision) -> Iterable[str]: - return rev._all_nextrev - - else: - - def fn(rev: Revision) -> Iterable[str]: - return rev.nextrev - - return self._iterate_related_revisions( - fn, targets, map_=map_, check=check - ) - - def _get_ancestor_nodes( - self, - targets: Collection[Optional[_RevisionOrBase]], - map_: Optional[_RevisionMapType] = None, - check: bool = False, - include_dependencies: bool = True, - ) -> Iterator[Revision]: - if include_dependencies: - - def fn(rev: Revision) -> Iterable[str]: - return rev._normalized_down_revisions - - else: - - def fn(rev: Revision) -> Iterable[str]: - return rev._versioned_down_revisions - - return self._iterate_related_revisions( - fn, targets, map_=map_, check=check - ) - - def _iterate_related_revisions( - self, - fn: Callable[[Revision], Iterable[str]], - targets: Collection[Optional[_RevisionOrBase]], - map_: Optional[_RevisionMapType], - check: bool = False, - ) -> Iterator[Revision]: - if map_ is None: - map_ = self._revision_map - - seen = set() - todo: Deque[Revision] = collections.deque() - for target_for in targets: - target = is_revision(target_for) - todo.append(target) - if check: - per_target = set() - - while todo: - rev = todo.pop() - if check: - per_target.add(rev) - - if rev in seen: - continue - seen.add(rev) - # Check for map errors before collecting. - for rev_id in fn(rev): - next_rev = map_[rev_id] - assert next_rev is not None - if next_rev.revision != rev_id: - raise RevisionError( - "Dependency resolution failed; broken map" - ) - todo.append(next_rev) - yield rev - if check: - overlaps = per_target.intersection(targets).difference( - [target] - ) - if overlaps: - raise RevisionError( - "Requested revision %s overlaps with " - "other requested revisions %s" - % ( - target.revision, - ", ".join(r.revision for r in overlaps), - ) - ) - - def _topological_sort( - self, - revisions: Collection[Revision], - heads: Any, - ) -> List[str]: - """Yield revision ids of a collection of Revision objects in - topological sorted order (i.e. revisions always come after their - down_revisions and dependencies). Uses the order of keys in - _revision_map to sort. - - """ - - id_to_rev = self._revision_map - - def get_ancestors(rev_id: str) -> Set[str]: - return { - r.revision - for r in self._get_ancestor_nodes([id_to_rev[rev_id]]) - } - - todo = {d.revision for d in revisions} - - # Use revision map (ordered dict) key order to pre-sort. 
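        # (annotation) Invariant of the emission loop that follows: a
        # candidate head is emitted only once no *other* active frontier
        # head still counts it among its ancestors; when one does, the
        # walk jumps to that branch first, so nothing is emitted while a
        # revision that depends on it is still pending.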
- inserted_order = list(self._revision_map) - - current_heads = list( - sorted( - {d.revision for d in heads if d.revision in todo}, - key=inserted_order.index, - ) - ) - ancestors_by_idx = [get_ancestors(rev_id) for rev_id in current_heads] - - output = [] - - current_candidate_idx = 0 - while current_heads: - candidate = current_heads[current_candidate_idx] - - for check_head_index, ancestors in enumerate(ancestors_by_idx): - # scan all the heads. see if we can continue walking - # down the current branch indicated by current_candidate_idx. - if ( - check_head_index != current_candidate_idx - and candidate in ancestors - ): - current_candidate_idx = check_head_index - # nope, another head is dependent on us, they have - # to be traversed first - break - else: - # yup, we can emit - if candidate in todo: - output.append(candidate) - todo.remove(candidate) - - # now update the heads with our ancestors. - - candidate_rev = id_to_rev[candidate] - assert candidate_rev is not None - - heads_to_add = [ - r - for r in candidate_rev._normalized_down_revisions - if r in todo and r not in current_heads - ] - - if not heads_to_add: - # no ancestors, so remove this head from the list - del current_heads[current_candidate_idx] - del ancestors_by_idx[current_candidate_idx] - current_candidate_idx = max(current_candidate_idx - 1, 0) - else: - if ( - not candidate_rev._normalized_resolved_dependencies - and len(candidate_rev._versioned_down_revisions) == 1 - ): - current_heads[current_candidate_idx] = heads_to_add[0] - - # for plain movement down a revision line without - # any mergepoints, branchpoints, or deps, we - # can update the ancestors collection directly - # by popping out the candidate we just emitted - ancestors_by_idx[current_candidate_idx].discard( - candidate - ) - - else: - # otherwise recalculate it again, things get - # complicated otherwise. This can possibly be - # improved to not run the whole ancestor thing - # each time but it was getting complicated - current_heads[current_candidate_idx] = heads_to_add[0] - current_heads.extend(heads_to_add[1:]) - ancestors_by_idx[current_candidate_idx] = ( - get_ancestors(heads_to_add[0]) - ) - ancestors_by_idx.extend( - get_ancestors(head) for head in heads_to_add[1:] - ) - - assert not todo - return output - - def _walk( - self, - start: Optional[Union[str, Revision]], - steps: int, - branch_label: Optional[str] = None, - no_overwalk: bool = True, - ) -> Optional[_RevisionOrBase]: - """ - Walk the requested number of :steps up (steps > 0) or down (steps < 0) - the revision tree. - - :branch_label is used to select branches only when walking up. - - If the walk goes past the boundaries of the tree and :no_overwalk is - True, None is returned, otherwise the walk terminates early. - - A RevisionError is raised if there is no unambiguous revision to - walk to. 
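A hedged example of the walk, against the hypothetical ``rev_map`` from earlier and assuming a single head::

    rev = rev_map._walk("head", steps=-2)  # two steps toward base
    # steps > 0 walks toward the heads; with no_overwalk=True (the
    # default) a walk off the end of the tree returns None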
- """ - initial: Optional[_RevisionOrBase] - if isinstance(start, str): - initial = self.get_revision(start) - else: - initial = start - - children: Sequence[Optional[_RevisionOrBase]] - for _ in range(abs(steps)): - if steps > 0: - assert initial != "base" # type: ignore[comparison-overlap] - # Walk up - walk_up = [ - is_revision(rev) - for rev in self.get_revisions( - self.bases if initial is None else initial.nextrev - ) - ] - if branch_label: - children = self.filter_for_lineage(walk_up, branch_label) - else: - children = walk_up - else: - # Walk down - if initial == "base": # type: ignore[comparison-overlap] - children = () - else: - children = self.get_revisions( - self.heads - if initial is None - else initial.down_revision - ) - if not children: - children = ("base",) - if not children: - # This will return an invalid result if no_overwalk, otherwise - # further steps will stay where we are. - ret = None if no_overwalk else initial - return ret - elif len(children) > 1: - raise RevisionError("Ambiguous walk") - initial = children[0] - - return initial - - def _parse_downgrade_target( - self, - current_revisions: _RevisionIdentifierType, - target: _RevisionIdentifierType, - assert_relative_length: bool, - ) -> Tuple[Optional[str], Optional[_RevisionOrBase]]: - """ - Parse downgrade command syntax :target to retrieve the target revision - and branch label (if any) given the :current_revisions stamp of the - database. - - Returns a tuple (branch_label, target_revision) where branch_label - is a string from the command specifying the branch to consider (or - None if no branch given), and target_revision is a Revision object - which the command refers to. target_revisions is None if the command - refers to 'base'. The target may be specified in absolute form, or - relative to :current_revisions. - """ - if target is None: - return None, None - assert isinstance( - target, str - ), "Expected downgrade target in string form" - match = _relative_destination.match(target) - if match: - branch_label, symbol, relative = match.groups() - rel_int = int(relative) - if rel_int >= 0: - if symbol is None: - # Downgrading to current + n is not valid. - raise RevisionError( - "Relative revision %s didn't " - "produce %d migrations" % (relative, abs(rel_int)) - ) - # Find target revision relative to given symbol. - rev = self._walk( - symbol, - rel_int, - branch_label, - no_overwalk=assert_relative_length, - ) - if rev is None: - raise RevisionError("Walked too far") - return branch_label, rev - else: - relative_revision = symbol is None - if relative_revision: - # Find target revision relative to current state. - if branch_label: - cr_tuple = util.to_tuple(current_revisions) - symbol_list: Sequence[str] - symbol_list = self.filter_for_lineage( - cr_tuple, branch_label - ) - if not symbol_list: - # check the case where there are multiple branches - # but there is currently a single heads, since all - # other branch heads are dependent of the current - # single heads. 
- all_current = cast( - Set[Revision], self._get_all_current(cr_tuple) - ) - sl_all_current = self.filter_for_lineage( - all_current, branch_label - ) - symbol_list = [ - r.revision if r else r # type: ignore[misc] - for r in sl_all_current - ] - - assert len(symbol_list) == 1 - symbol = symbol_list[0] - else: - current_revisions = util.to_tuple(current_revisions) - if not current_revisions: - raise RevisionError( - "Relative revision %s didn't " - "produce %d migrations" - % (relative, abs(rel_int)) - ) - # Have to check uniques here for duplicate rows test. - if len(set(current_revisions)) > 1: - util.warn( - "downgrade -1 from multiple heads is " - "ambiguous; " - "this usage will be disallowed in a future " - "release." - ) - symbol = current_revisions[0] - # Restrict iteration to just the selected branch when - # ambiguous branches are involved. - branch_label = symbol - # Walk down the tree to find downgrade target. - rev = self._walk( - start=( - self.get_revision(symbol) - if branch_label is None - else self.get_revision( - "%s@%s" % (branch_label, symbol) - ) - ), - steps=rel_int, - no_overwalk=assert_relative_length, - ) - if rev is None: - if relative_revision: - raise RevisionError( - "Relative revision %s didn't " - "produce %d migrations" % (relative, abs(rel_int)) - ) - else: - raise RevisionError("Walked too far") - return branch_label, rev - - # No relative destination given, revision specified is absolute. - branch_label, _, symbol = target.rpartition("@") - if not branch_label: - branch_label = None - return branch_label, self.get_revision(symbol) - - def _parse_upgrade_target( - self, - current_revisions: _RevisionIdentifierType, - target: _RevisionIdentifierType, - assert_relative_length: bool, - ) -> Tuple[Optional[_RevisionOrBase], ...]: - """ - Parse upgrade command syntax :target to retrieve the target revision - and given the :current_revisions stamp of the database. - - Returns a tuple of Revision objects which should be iterated/upgraded - to. The target may be specified in absolute form, or relative to - :current_revisions. - """ - if isinstance(target, str): - match = _relative_destination.match(target) - else: - match = None - - if not match: - # No relative destination, target is absolute. - return self.get_revisions(target) - - current_revisions_tup: Union[str, Tuple[Optional[str], ...], None] - current_revisions_tup = util.to_tuple(current_revisions) - - branch_label, symbol, relative_str = match.groups() - relative = int(relative_str) - if relative > 0: - if symbol is None: - if not current_revisions_tup: - current_revisions_tup = (None,) - # Try to filter to a single target (avoid ambiguous branches). - start_revs = current_revisions_tup - if branch_label: - start_revs = self.filter_for_lineage( - self.get_revisions(current_revisions_tup), # type: ignore[arg-type] # noqa: E501 - branch_label, - ) - if not start_revs: - # The requested branch is not a head, so we need to - # backtrack to find a branchpoint. - active_on_branch = self.filter_for_lineage( - self._get_ancestor_nodes( - self.get_revisions(current_revisions_tup) - ), - branch_label, - ) - # Find the tips of this set of revisions (revisions - # without children within the set). - start_revs = tuple( - {rev.revision for rev in active_on_branch} - - { - down - for rev in active_on_branch - for down in rev._normalized_down_revisions - } - ) - if not start_revs: - # We must need to go right back to base to find - # a starting point for this branch. 
- start_revs = (None,) - if len(start_revs) > 1: - raise RevisionError( - "Ambiguous upgrade from multiple current revisions" - ) - # Walk up from unique target revision. - rev = self._walk( - start=start_revs[0], - steps=relative, - branch_label=branch_label, - no_overwalk=assert_relative_length, - ) - if rev is None: - raise RevisionError( - "Relative revision %s didn't " - "produce %d migrations" % (relative_str, abs(relative)) - ) - return (rev,) - else: - # Walk is relative to a given revision, not the current state. - return ( - self._walk( - start=self.get_revision(symbol), - steps=relative, - branch_label=branch_label, - no_overwalk=assert_relative_length, - ), - ) - else: - if symbol is None: - # Upgrading to current - n is not valid. - raise RevisionError( - "Relative revision %s didn't " - "produce %d migrations" % (relative, abs(relative)) - ) - return ( - self._walk( - start=( - self.get_revision(symbol) - if branch_label is None - else self.get_revision( - "%s@%s" % (branch_label, symbol) - ) - ), - steps=relative, - no_overwalk=assert_relative_length, - ), - ) - - def _collect_downgrade_revisions( - self, - upper: _RevisionIdentifierType, - lower: _RevisionIdentifierType, - inclusive: bool, - implicit_base: bool, - assert_relative_length: bool, - ) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]: - """ - Compute the set of current revisions specified by :upper, and the - downgrade target specified by :target. Return all dependents of target - which are currently active. - - :inclusive=True includes the target revision in the set - """ - - branch_label, target_revision = self._parse_downgrade_target( - current_revisions=upper, - target=lower, - assert_relative_length=assert_relative_length, - ) - if target_revision == "base": - target_revision = None - assert target_revision is None or isinstance(target_revision, Revision) - - roots: List[Revision] - # Find candidates to drop. - if target_revision is None: - # Downgrading back to base: find all tree roots. - roots = [ - rev - for rev in self._revision_map.values() - if rev is not None and rev.down_revision is None - ] - elif inclusive: - # inclusive implies target revision should also be dropped - roots = [target_revision] - else: - # Downgrading to fixed target: find all direct children. - roots = [ - is_revision(rev) - for rev in self.get_revisions(target_revision.nextrev) - ] - - if branch_label and len(roots) > 1: - # Need to filter roots. - ancestors = { - rev.revision - for rev in self._get_ancestor_nodes( - [self._resolve_branch(branch_label)], - include_dependencies=False, - ) - } - # Intersection gives the root revisions we are trying to - # rollback with the downgrade. - roots = [ - is_revision(rev) - for rev in self.get_revisions( - {rev.revision for rev in roots}.intersection(ancestors) - ) - ] - - # Ensure we didn't throw everything away when filtering branches. - if len(roots) == 0: - raise RevisionError( - "Not a valid downgrade target from current heads" - ) - - heads = self.get_revisions(upper) - - # Aim is to drop :branch_revision; to do so we also need to drop its - # descendents and anything dependent on it. - downgrade_revisions = set( - self._get_descendant_nodes( - roots, - include_dependencies=True, - omit_immediate_dependencies=False, - ) - ) - active_revisions = set( - self._get_ancestor_nodes(heads, include_dependencies=True) - ) - - # Emit revisions to drop in reverse topological sorted order. 
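The `intersection_update` that follows is this whole computation in miniature: candidates below the target (descendants of the roots) are kept only if they are actually applied (ancestors of the current heads). A toy illustration with a linear history base -> a -> b -> c, current head "c", downgrading to "a":

    # Descendants of the roots (children of target "a"): what could be dropped.
    downgrade_revisions = {"b", "c"}
    # Ancestors of the current head "c": what is actually applied.
    active_revisions = {"a", "b", "c"}

    downgrade_revisions &= active_revisions
    print(sorted(downgrade_revisions))   # ['b', 'c']; target "a" itself survives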
- downgrade_revisions.intersection_update(active_revisions) - - if implicit_base: - # Wind other branches back to base. - downgrade_revisions.update( - active_revisions.difference(self._get_ancestor_nodes(roots)) - ) - - if ( - target_revision is not None - and not downgrade_revisions - and target_revision not in heads - ): - # Empty intersection: target revs are not present. - - raise RangeNotAncestorError("Nothing to drop", upper) - - return downgrade_revisions, heads - - def _collect_upgrade_revisions( - self, - upper: _RevisionIdentifierType, - lower: _RevisionIdentifierType, - inclusive: bool, - implicit_base: bool, - assert_relative_length: bool, - ) -> Tuple[Set[Revision], Tuple[Revision, ...]]: - """ - Compute the set of required revisions specified by :upper, and the - current set of active revisions specified by :lower. Find the - difference between the two to compute the required upgrades. - - :inclusive=True includes the current/lower revisions in the set - - :implicit_base=False only returns revisions which are downstream - of the current/lower revisions. Dependencies from branches with - different bases will not be included. - """ - targets: Collection[Revision] = [ - is_revision(rev) - for rev in self._parse_upgrade_target( - current_revisions=lower, - target=upper, - assert_relative_length=assert_relative_length, - ) - ] - - # assert type(targets) is tuple, "targets should be a tuple" - - # Handled named bases (e.g. branch@... -> heads should only produce - # targets on the given branch) - if isinstance(lower, str) and "@" in lower: - branch, _, _ = lower.partition("@") - branch_rev = self.get_revision(branch) - if branch_rev is not None and branch_rev.revision == branch: - # A revision was used as a label; get its branch instead - assert len(branch_rev.branch_labels) == 1 - branch = next(iter(branch_rev.branch_labels)) - targets = { - need for need in targets if branch in need.branch_labels - } - - required_node_set = set( - self._get_ancestor_nodes( - targets, check=True, include_dependencies=True - ) - ).union(targets) - - current_revisions = self.get_revisions(lower) - if not implicit_base and any( - rev not in required_node_set - for rev in current_revisions - if rev is not None - ): - raise RangeNotAncestorError(lower, upper) - assert ( - type(current_revisions) is tuple - ), "current_revisions should be a tuple" - - # Special case where lower = a relative value (get_revisions can't - # find it) - if current_revisions and current_revisions[0] is None: - _, rev = self._parse_downgrade_target( - current_revisions=upper, - target=lower, - assert_relative_length=assert_relative_length, - ) - assert rev - if rev == "base": - current_revisions = tuple() - lower = None - else: - current_revisions = (rev,) - lower = rev.revision - - current_node_set = set( - self._get_ancestor_nodes( - current_revisions, check=True, include_dependencies=True - ) - ).union(current_revisions) - - needs = required_node_set.difference(current_node_set) - - # Include the lower revision (=current_revisions?) in the iteration - if inclusive: - needs.update(is_revision(rev) for rev in self.get_revisions(lower)) - # By default, base is implicit as we want all dependencies returned. 
- # Base is also implicit if lower = base - # implicit_base=False -> only return direct downstreams of - # current_revisions - if current_revisions and not implicit_base: - lower_descendents = self._get_descendant_nodes( - [is_revision(rev) for rev in current_revisions], - check=True, - include_dependencies=False, - ) - needs.intersection_update(lower_descendents) - - return needs, tuple(targets) - - def _get_all_current( - self, id_: Tuple[str, ...] - ) -> Set[Optional[_RevisionOrBase]]: - top_revs: Set[Optional[_RevisionOrBase]] - top_revs = set(self.get_revisions(id_)) - top_revs.update( - self._get_ancestor_nodes(list(top_revs), include_dependencies=True) - ) - return self._filter_into_branch_heads(top_revs) - - -class Revision: - """Base class for revisioned objects. - - The :class:`.Revision` class is the base of the more public-facing - :class:`.Script` object, which represents a migration script. - The mechanics of revision management and traversal are encapsulated - within :class:`.Revision`, while :class:`.Script` applies this logic - to Python files in a version directory. - - """ - - nextrev: FrozenSet[str] = frozenset() - """following revisions, based on down_revision only.""" - - _all_nextrev: FrozenSet[str] = frozenset() - - revision: str = None # type: ignore[assignment] - """The string revision number.""" - - down_revision: Optional[_RevIdType] = None - """The ``down_revision`` identifier(s) within the migration script. - - Note that the total set of "down" revisions is - down_revision + dependencies. - - """ - - dependencies: Optional[_RevIdType] = None - """Additional revisions which this revision is dependent on. - - From a migration standpoint, these dependencies are added to the - down_revision to form the full iteration. However, the separation - of down_revision from "dependencies" is to assist in navigating - a history that contains many branches, typically a multi-root scenario. - - """ - - branch_labels: Set[str] = None # type: ignore[assignment] - """Optional string/tuple of symbolic names to apply to this - revision's branch""" - - _resolved_dependencies: Tuple[str, ...] - _normalized_resolved_dependencies: Tuple[str, ...] 
- - @classmethod - def verify_rev_id(cls, revision: str) -> None: - illegal_chars = set(revision).intersection(_revision_illegal_chars) - if illegal_chars: - raise RevisionError( - "Character(s) '%s' not allowed in revision identifier '%s'" - % (", ".join(sorted(illegal_chars)), revision) - ) - - def __init__( - self, - revision: str, - down_revision: Optional[Union[str, Tuple[str, ...]]], - dependencies: Optional[Union[str, Tuple[str, ...]]] = None, - branch_labels: Optional[Union[str, Tuple[str, ...]]] = None, - ) -> None: - if down_revision and revision in util.to_tuple(down_revision): - raise LoopDetected(revision) - elif dependencies is not None and revision in util.to_tuple( - dependencies - ): - raise DependencyLoopDetected(revision) - - self.verify_rev_id(revision) - self.revision = revision - self.down_revision = tuple_rev_as_scalar(util.to_tuple(down_revision)) - self.dependencies = tuple_rev_as_scalar(util.to_tuple(dependencies)) - self._orig_branch_labels = util.to_tuple(branch_labels, default=()) - self.branch_labels = set(self._orig_branch_labels) - - def __repr__(self) -> str: - args = [repr(self.revision), repr(self.down_revision)] - if self.dependencies: - args.append("dependencies=%r" % (self.dependencies,)) - if self.branch_labels: - args.append("branch_labels=%r" % (self.branch_labels,)) - return "%s(%s)" % (self.__class__.__name__, ", ".join(args)) - - def add_nextrev(self, revision: Revision) -> None: - self._all_nextrev = self._all_nextrev.union([revision.revision]) - if self.revision in revision._versioned_down_revisions: - self.nextrev = self.nextrev.union([revision.revision]) - - @property - def _all_down_revisions(self) -> Tuple[str, ...]: - return util.dedupe_tuple( - util.to_tuple(self.down_revision, default=()) - + self._resolved_dependencies - ) - - @property - def _normalized_down_revisions(self) -> Tuple[str, ...]: - """return immediate down revisions for a rev, omitting dependencies - that are still dependencies of ancestors. - - """ - return util.dedupe_tuple( - util.to_tuple(self.down_revision, default=()) - + self._normalized_resolved_dependencies - ) - - @property - def _versioned_down_revisions(self) -> Tuple[str, ...]: - return util.to_tuple(self.down_revision, default=()) - - @property - def is_head(self) -> bool: - """Return True if this :class:`.Revision` is a 'head' revision. - - This is determined based on whether any other :class:`.Script` - within the :class:`.ScriptDirectory` refers to this - :class:`.Script`. Multiple heads can be present. - - """ - return not bool(self.nextrev) - - @property - def _is_real_head(self) -> bool: - return not bool(self._all_nextrev) - - @property - def is_base(self) -> bool: - """Return True if this :class:`.Revision` is a 'base' revision.""" - - return self.down_revision is None - - @property - def _is_real_base(self) -> bool: - """Return True if this :class:`.Revision` is a "real" base revision, - e.g. that it has no dependencies either.""" - - # we use self.dependencies here because this is called up - # in initialization where _real_dependencies isn't set up - # yet - return self.down_revision is None and self.dependencies is None - - @property - def is_branch_point(self) -> bool: - """Return True if this :class:`.Script` is a branch point. - - A branchpoint is defined as a :class:`.Script` which is referred - to by more than one succeeding :class:`.Script`, that is more - than one :class:`.Script` has a `down_revision` identifier pointing - here. 
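Branch points, heads, and bases are easiest to see with a small wired-up graph. A minimal, self-contained sketch using illustrative revision ids (`add_nextrev` is normally called by the revision map while loading scripts, not by user code):

    from alembic.script.revision import Revision

    a = Revision("a1", None)                     # a base: no down_revision
    b = Revision("b1", "a1")
    c = Revision("c1", "a1", dependencies="x1")  # also depends on another branch
    x = Revision("x1", None)

    a.add_nextrev(b)
    a.add_nextrev(c)
    x.add_nextrev(c)   # dependency-only link: recorded in _all_nextrev only

    print(a.is_base, a.is_branch_point)   # True True   (two versioned children)
    print(b.is_head, c.is_head)           # True True   (nothing follows them)
    print(x.is_head, x._is_real_head)     # True False  (head, but not a "real" head)

The `x` case shows why `is_head` and `_is_real_head` are kept separate: a dependency counts against "real" headness but never joins the versioned lineage tracked by `nextrev`.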
- - """ - return len(self.nextrev) > 1 - - @property - def _is_real_branch_point(self) -> bool: - """Return True if this :class:`.Script` is a 'real' branch point, - taking into account dependencies as well. - - """ - return len(self._all_nextrev) > 1 - - @property - def is_merge_point(self) -> bool: - """Return True if this :class:`.Script` is a merge point.""" - - return len(self._versioned_down_revisions) > 1 - - -@overload -def tuple_rev_as_scalar(rev: None) -> None: ... - - -@overload -def tuple_rev_as_scalar( - rev: Union[Tuple[_T, ...], List[_T]] -) -> Union[_T, Tuple[_T, ...], List[_T]]: ... - - -def tuple_rev_as_scalar( - rev: Optional[Sequence[_T]], -) -> Union[_T, Sequence[_T], None]: - if not rev: - return None - elif len(rev) == 1: - return rev[0] - else: - return rev - - -def is_revision(rev: Any) -> Revision: - assert isinstance(rev, Revision) - return rev diff --git a/myenv/Lib/site-packages/alembic/script/write_hooks.py b/myenv/Lib/site-packages/alembic/script/write_hooks.py deleted file mode 100644 index 9977147..0000000 --- a/myenv/Lib/site-packages/alembic/script/write_hooks.py +++ /dev/null @@ -1,179 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls -# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import shlex -import subprocess -import sys -from typing import Any -from typing import Callable -from typing import Dict -from typing import List -from typing import Mapping -from typing import Optional -from typing import Union - -from .. import util -from ..util import compat - - -REVISION_SCRIPT_TOKEN = "REVISION_SCRIPT_FILENAME" - -_registry: dict = {} - - -def register(name: str) -> Callable: - """A function decorator that will register that function as a write hook. - - See the documentation linked below for an example. - - .. seealso:: - - :ref:`post_write_hooks_custom` - - - """ - - def decorate(fn): - _registry[name] = fn - return fn - - return decorate - - -def _invoke( - name: str, revision: str, options: Mapping[str, Union[str, int]] -) -> Any: - """Invokes the formatter registered for the given name. - - :param name: The name of a formatter in the registry - :param revision: A :class:`.MigrationRevision` instance - :param options: A dict containing kwargs passed to the - specified formatter. - :raises: :class:`alembic.util.CommandError` - """ - try: - hook = _registry[name] - except KeyError as ke: - raise util.CommandError( - f"No formatter with name '{name}' registered" - ) from ke - else: - return hook(revision, options) - - -def _run_hooks(path: str, hook_config: Mapping[str, str]) -> None: - """Invoke hooks for a generated revision.""" - - from .base import _split_on_space_comma - - names = _split_on_space_comma.split(hook_config.get("hooks", "")) - - for name in names: - if not name: - continue - opts = { - key[len(name) + 1 :]: hook_config[key] - for key in hook_config - if key.startswith(name + ".") - } - opts["_hook_name"] = name - try: - type_ = opts["type"] - except KeyError as ke: - raise util.CommandError( - f"Key {name}.type is required for post write hook {name!r}" - ) from ke - else: - with util.status( - f"Running post write hook {name!r}", newline=True - ): - _invoke(type_, path, opts) - - -def _parse_cmdline_options(cmdline_options_str: str, path: str) -> List[str]: - """Parse options from a string into a list. - - Also substitutes the revision script token with the actual filename of - the revision script. 
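Putting `register`, `_run_hooks`, and `_invoke` together: a custom hook is an ordinary callable receiving the revision path and its option dict. A hypothetical example (the hook name and its behavior are illustrative, not part of Alembic):

    from alembic.script.write_hooks import register

    @register("strip_trailing_ws")
    def strip_trailing_ws(path, options):
        # "path" is the freshly written revision file; "options" holds the
        # "<hookname>.<key>" values from [post_write_hooks] plus "_hook_name".
        with open(path) as f:
            lines = [line.rstrip() + "\n" for line in f]
        with open(path, "w") as f:
            f.writelines(lines)

    # Wired up in alembic.ini (illustrative):
    #   [post_write_hooks]
    #   hooks = strip_trailing_ws
    #   strip_trailing_ws.type = strip_trailing_ws

The `.type` key selects the registry entry that `_invoke` looks up, so for a hand-registered hook it simply repeats the name given to `@register`.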
- - If the revision script token doesn't occur in the options string, it is - automatically prepended. - """ - if REVISION_SCRIPT_TOKEN not in cmdline_options_str: - cmdline_options_str = REVISION_SCRIPT_TOKEN + " " + cmdline_options_str - cmdline_options_list = shlex.split( - cmdline_options_str, posix=compat.is_posix - ) - cmdline_options_list = [ - option.replace(REVISION_SCRIPT_TOKEN, path) - for option in cmdline_options_list - ] - return cmdline_options_list - - -@register("console_scripts") -def console_scripts( - path: str, options: dict, ignore_output: bool = False -) -> None: - try: - entrypoint_name = options["entrypoint"] - except KeyError as ke: - raise util.CommandError( - f"Key {options['_hook_name']}.entrypoint is required for post " - f"write hook {options['_hook_name']!r}" - ) from ke - for entry in compat.importlib_metadata_get("console_scripts"): - if entry.name == entrypoint_name: - impl: Any = entry - break - else: - raise util.CommandError( - f"Could not find entrypoint console_scripts.{entrypoint_name}" - ) - cwd: Optional[str] = options.get("cwd", None) - cmdline_options_str = options.get("options", "") - cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path) - - kw: Dict[str, Any] = {} - if ignore_output: - kw["stdout"] = kw["stderr"] = subprocess.DEVNULL - - subprocess.run( - [ - sys.executable, - "-c", - f"import {impl.module}; {impl.module}.{impl.attr}()", - ] - + cmdline_options_list, - cwd=cwd, - **kw, - ) - - -@register("exec") -def exec_(path: str, options: dict, ignore_output: bool = False) -> None: - try: - executable = options["executable"] - except KeyError as ke: - raise util.CommandError( - f"Key {options['_hook_name']}.executable is required for post " - f"write hook {options['_hook_name']!r}" - ) from ke - cwd: Optional[str] = options.get("cwd", None) - cmdline_options_str = options.get("options", "") - cmdline_options_list = _parse_cmdline_options(cmdline_options_str, path) - - kw: Dict[str, Any] = {} - if ignore_output: - kw["stdout"] = kw["stderr"] = subprocess.DEVNULL - - subprocess.run( - [ - executable, - *cmdline_options_list, - ], - cwd=cwd, - **kw, - ) diff --git a/myenv/Lib/site-packages/alembic/templates/async/README b/myenv/Lib/site-packages/alembic/templates/async/README deleted file mode 100644 index e0d0858..0000000 --- a/myenv/Lib/site-packages/alembic/templates/async/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration with an async dbapi. \ No newline at end of file diff --git a/myenv/Lib/site-packages/alembic/templates/async/alembic.ini.mako b/myenv/Lib/site-packages/alembic/templates/async/alembic.ini.mako deleted file mode 100644 index 46a0904..0000000 --- a/myenv/Lib/site-packages/alembic/templates/async/alembic.ini.mako +++ /dev/null @@ -1,115 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts. -# Use forward slashes (/) also on windows to provide an os agnostic path -script_location = ${script_location} - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. 
-# If specified, requires the python>=3.9 or backports.zoneinfo library. -# Any required deps can installed by adding `alembic[tz]` to the pip requirements -# string value is passed to ZoneInfo() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to ${script_location}/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -sqlalchemy.url = driver://user:pass@localhost/dbname - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. 
See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/myenv/Lib/site-packages/alembic/templates/async/env.py b/myenv/Lib/site-packages/alembic/templates/async/env.py deleted file mode 100644 index 9f2d519..0000000 --- a/myenv/Lib/site-packages/alembic/templates/async/env.py +++ /dev/null @@ -1,89 +0,0 @@ -import asyncio -from logging.config import fileConfig - -from sqlalchemy import pool -from sqlalchemy.engine import Connection -from sqlalchemy.ext.asyncio import async_engine_from_config - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -if config.config_file_name is not None: - fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = None - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline() -> None: - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def do_run_migrations(connection: Connection) -> None: - context.configure(connection=connection, target_metadata=target_metadata) - - with context.begin_transaction(): - context.run_migrations() - - -async def run_async_migrations() -> None: - """In this scenario we need to create an Engine - and associate a connection with the context. 
- - """ - - connectable = async_engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - async with connectable.connect() as connection: - await connection.run_sync(do_run_migrations) - - await connectable.dispose() - - -def run_migrations_online() -> None: - """Run migrations in 'online' mode.""" - - asyncio.run(run_async_migrations()) - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/myenv/Lib/site-packages/alembic/templates/async/script.py.mako b/myenv/Lib/site-packages/alembic/templates/async/script.py.mako deleted file mode 100644 index fbc4b07..0000000 --- a/myenv/Lib/site-packages/alembic/templates/async/script.py.mako +++ /dev/null @@ -1,26 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} -branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} -depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} - - -def upgrade() -> None: - ${upgrades if upgrades else "pass"} - - -def downgrade() -> None: - ${downgrades if downgrades else "pass"} diff --git a/myenv/Lib/site-packages/alembic/templates/generic/README b/myenv/Lib/site-packages/alembic/templates/generic/README deleted file mode 100644 index 98e4f9c..0000000 --- a/myenv/Lib/site-packages/alembic/templates/generic/README +++ /dev/null @@ -1 +0,0 @@ -Generic single-database configuration. \ No newline at end of file diff --git a/myenv/Lib/site-packages/alembic/templates/generic/alembic.ini.mako b/myenv/Lib/site-packages/alembic/templates/generic/alembic.ini.mako deleted file mode 100644 index dd4ea58..0000000 --- a/myenv/Lib/site-packages/alembic/templates/generic/alembic.ini.mako +++ /dev/null @@ -1,117 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# path to migration scripts -# Use forward slashes (/) also on windows to provide an os agnostic path -script_location = ${script_location} - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. 
-# Any required deps can installed by adding `alembic[tz]` to the pip requirements -# string value is passed to ZoneInfo() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to ${script_location}/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -sqlalchemy.url = driver://user:pass@localhost/dbname - - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/myenv/Lib/site-packages/alembic/templates/generic/env.py b/myenv/Lib/site-packages/alembic/templates/generic/env.py deleted file mode 100644 index 36112a3..0000000 --- a/myenv/Lib/site-packages/alembic/templates/generic/env.py +++ /dev/null @@ -1,78 +0,0 @@ -from logging.config import fileConfig - -from sqlalchemy import engine_from_config -from sqlalchemy import pool - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. 
-config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -if config.config_file_name is not None: - fileConfig(config.config_file_name) - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -target_metadata = None - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline() -> None: - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, - target_metadata=target_metadata, - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online() -> None: - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - connectable = engine_from_config( - config.get_section(config.config_ini_section, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - with connectable.connect() as connection: - context.configure( - connection=connection, target_metadata=target_metadata - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/myenv/Lib/site-packages/alembic/templates/generic/script.py.mako b/myenv/Lib/site-packages/alembic/templates/generic/script.py.mako deleted file mode 100644 index fbc4b07..0000000 --- a/myenv/Lib/site-packages/alembic/templates/generic/script.py.mako +++ /dev/null @@ -1,26 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} -branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} -depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} - - -def upgrade() -> None: - ${upgrades if upgrades else "pass"} - - -def downgrade() -> None: - ${downgrades if downgrades else "pass"} diff --git a/myenv/Lib/site-packages/alembic/templates/multidb/README b/myenv/Lib/site-packages/alembic/templates/multidb/README deleted file mode 100644 index f046ec9..0000000 --- a/myenv/Lib/site-packages/alembic/templates/multidb/README +++ /dev/null @@ -1,12 +0,0 @@ -Rudimentary multi-database configuration. - -Multi-DB isn't vastly different from generic. The primary difference is that it -will run the migrations N times (depending on how many databases you have -configured), providing one engine name and associated context for each run. - -That engine name will then allow the migration to restrict what runs within it to -just the appropriate migrations for that engine. You can see this behavior within -the mako template. 
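Concretely, a migration rendered from the mako template for `databases = engine1, engine2` dispatches on the engine name it is handed. Roughly, as a sketch of the generated module with `pass` standing in for real operations:

    def upgrade(engine_name: str) -> None:
        globals()["upgrade_%s" % engine_name]()

    def downgrade(engine_name: str) -> None:
        globals()["downgrade_%s" % engine_name]()

    def upgrade_engine1() -> None:
        pass   # rendered from the "engine1_upgrades" template token

    def downgrade_engine1() -> None:
        pass

    def upgrade_engine2() -> None:
        pass   # rendered from the "engine2_upgrades" token

    def downgrade_engine2() -> None:
        pass

env.py then calls `context.run_migrations(engine_name=name)` once per configured engine, so each run executes only that engine's functions.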
- -In the provided configuration, you'll need to have `databases` provided in -alembic's config, and an `sqlalchemy.url` provided for each engine name. diff --git a/myenv/Lib/site-packages/alembic/templates/multidb/alembic.ini.mako b/myenv/Lib/site-packages/alembic/templates/multidb/alembic.ini.mako deleted file mode 100644 index d5cc86f..0000000 --- a/myenv/Lib/site-packages/alembic/templates/multidb/alembic.ini.mako +++ /dev/null @@ -1,122 +0,0 @@ -# a multi-database configuration. - -[alembic] -# path to migration scripts -# Use forward slashes (/) also on windows to provide an os agnostic path -script_location = ${script_location} - -# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s -# Uncomment the line below if you want the files to be prepended with date and time -# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file -# for all available tokens -# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s - -# sys.path path, will be prepended to sys.path if present. -# defaults to the current working directory. -prepend_sys_path = . - -# timezone to use when rendering the date within the migration file -# as well as the filename. -# If specified, requires the python>=3.9 or backports.zoneinfo library. -# Any required deps can installed by adding `alembic[tz]` to the pip requirements -# string value is passed to ZoneInfo() -# leave blank for localtime -# timezone = - -# max length of characters to apply to the "slug" field -# truncate_slug_length = 40 - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - -# set to 'true' to allow .pyc and .pyo files without -# a source .py file to be detected as revisions in the -# versions/ directory -# sourceless = false - -# version location specification; This defaults -# to ${script_location}/versions. When using multiple version -# directories, initial revisions must be specified with --version-path. -# The path separator used here should be the separator specified by "version_path_separator" below. -# version_locations = %(here)s/bar:%(here)s/bat:${script_location}/versions - -# version path separator; As mentioned above, this is the character used to split -# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep. -# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas. -# Valid values for version_path_separator are: -# -# version_path_separator = : -# version_path_separator = ; -# version_path_separator = space -# version_path_separator = newline -version_path_separator = os # Use os.pathsep. Default configuration used for new projects. - -# set to 'true' to search source files recursively -# in each "version_locations" directory -# new in Alembic version 1.10 -# recursive_version_locations = false - -# the output encoding used when revision files -# are written from script.py.mako -# output_encoding = utf-8 - -databases = engine1, engine2 - -[engine1] -sqlalchemy.url = driver://user:pass@localhost/dbname - -[engine2] -sqlalchemy.url = driver://user:pass@localhost/dbname2 - -[post_write_hooks] -# post_write_hooks defines scripts or Python functions that are run -# on newly generated revision scripts. 
See the documentation for further -# detail and examples - -# format using "black" - use the console_scripts runner, against the "black" entrypoint -# hooks = black -# black.type = console_scripts -# black.entrypoint = black -# black.options = -l 79 REVISION_SCRIPT_FILENAME - -# lint with attempts to fix using "ruff" - use the exec runner, execute a binary -# hooks = ruff -# ruff.type = exec -# ruff.executable = %(here)s/.venv/bin/ruff -# ruff.options = --fix REVISION_SCRIPT_FILENAME - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/myenv/Lib/site-packages/alembic/templates/multidb/env.py b/myenv/Lib/site-packages/alembic/templates/multidb/env.py deleted file mode 100644 index e937b64..0000000 --- a/myenv/Lib/site-packages/alembic/templates/multidb/env.py +++ /dev/null @@ -1,140 +0,0 @@ -import logging -from logging.config import fileConfig -import re - -from sqlalchemy import engine_from_config -from sqlalchemy import pool - -from alembic import context - -USE_TWOPHASE = False - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -if config.config_file_name is not None: - fileConfig(config.config_file_name) -logger = logging.getLogger("alembic.env") - -# gather section names referring to different -# databases. These are named "engine1", "engine2" -# in the sample .ini file. -db_names = config.get_main_option("databases", "") - -# add your model's MetaData objects here -# for 'autogenerate' support. These must be set -# up to hold just those tables targeting a -# particular database. table.tometadata() may be -# helpful here in case a "copy" of -# a MetaData is needed. -# from myapp import mymodel -# target_metadata = { -# 'engine1':mymodel.metadata1, -# 'engine2':mymodel.metadata2 -# } -target_metadata = {} - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def run_migrations_offline() -> None: - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - # for the --sql use case, run migrations for each URL into - # individual files. 
- - engines = {} - for name in re.split(r",\s*", db_names): - engines[name] = rec = {} - rec["url"] = context.config.get_section_option(name, "sqlalchemy.url") - - for name, rec in engines.items(): - logger.info("Migrating database %s" % name) - file_ = "%s.sql" % name - logger.info("Writing output to %s" % file_) - with open(file_, "w") as buffer: - context.configure( - url=rec["url"], - output_buffer=buffer, - target_metadata=target_metadata.get(name), - literal_binds=True, - dialect_opts={"paramstyle": "named"}, - ) - with context.begin_transaction(): - context.run_migrations(engine_name=name) - - -def run_migrations_online() -> None: - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - # for the direct-to-DB use case, start a transaction on all - # engines, then run all migrations, then commit all transactions. - - engines = {} - for name in re.split(r",\s*", db_names): - engines[name] = rec = {} - rec["engine"] = engine_from_config( - context.config.get_section(name, {}), - prefix="sqlalchemy.", - poolclass=pool.NullPool, - ) - - for name, rec in engines.items(): - engine = rec["engine"] - rec["connection"] = conn = engine.connect() - - if USE_TWOPHASE: - rec["transaction"] = conn.begin_twophase() - else: - rec["transaction"] = conn.begin() - - try: - for name, rec in engines.items(): - logger.info("Migrating database %s" % name) - context.configure( - connection=rec["connection"], - upgrade_token="%s_upgrades" % name, - downgrade_token="%s_downgrades" % name, - target_metadata=target_metadata.get(name), - ) - context.run_migrations(engine_name=name) - - if USE_TWOPHASE: - for rec in engines.values(): - rec["transaction"].prepare() - - for rec in engines.values(): - rec["transaction"].commit() - except: - for rec in engines.values(): - rec["transaction"].rollback() - raise - finally: - for rec in engines.values(): - rec["connection"].close() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/myenv/Lib/site-packages/alembic/templates/multidb/script.py.mako b/myenv/Lib/site-packages/alembic/templates/multidb/script.py.mako deleted file mode 100644 index 6108b8a..0000000 --- a/myenv/Lib/site-packages/alembic/templates/multidb/script.py.mako +++ /dev/null @@ -1,47 +0,0 @@ -<%! -import re - -%>"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from typing import Sequence, Union - -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision: str = ${repr(up_revision)} -down_revision: Union[str, None] = ${repr(down_revision)} -branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} -depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} - - -def upgrade(engine_name: str) -> None: - globals()["upgrade_%s" % engine_name]() - - -def downgrade(engine_name: str) -> None: - globals()["downgrade_%s" % engine_name]() - -<% - db_names = config.get_main_option("databases") -%> - -## generate an "upgrade_() / downgrade_()" function -## for each database name in the ini file. 
- -% for db_name in re.split(r',\s*', db_names): - -def upgrade_${db_name}() -> None: - ${context.get("%s_upgrades" % db_name, "pass")} - - -def downgrade_${db_name}() -> None: - ${context.get("%s_downgrades" % db_name, "pass")} - -% endfor diff --git a/myenv/Lib/site-packages/alembic/testing/__init__.py b/myenv/Lib/site-packages/alembic/testing/__init__.py deleted file mode 100644 index 0407adf..0000000 --- a/myenv/Lib/site-packages/alembic/testing/__init__.py +++ /dev/null @@ -1,29 +0,0 @@ -from sqlalchemy.testing import config -from sqlalchemy.testing import emits_warning -from sqlalchemy.testing import engines -from sqlalchemy.testing import exclusions -from sqlalchemy.testing import mock -from sqlalchemy.testing import provide_metadata -from sqlalchemy.testing import skip_if -from sqlalchemy.testing import uses_deprecated -from sqlalchemy.testing.config import combinations -from sqlalchemy.testing.config import fixture -from sqlalchemy.testing.config import requirements as requires - -from .assertions import assert_raises -from .assertions import assert_raises_message -from .assertions import emits_python_deprecation_warning -from .assertions import eq_ -from .assertions import eq_ignore_whitespace -from .assertions import expect_raises -from .assertions import expect_raises_message -from .assertions import expect_sqlalchemy_deprecated -from .assertions import expect_sqlalchemy_deprecated_20 -from .assertions import expect_warnings -from .assertions import is_ -from .assertions import is_false -from .assertions import is_not_ -from .assertions import is_true -from .assertions import ne_ -from .fixtures import TestBase -from .util import resolve_lambda diff --git a/myenv/Lib/site-packages/alembic/testing/assertions.py b/myenv/Lib/site-packages/alembic/testing/assertions.py deleted file mode 100644 index e071697..0000000 --- a/myenv/Lib/site-packages/alembic/testing/assertions.py +++ /dev/null @@ -1,176 +0,0 @@ -from __future__ import annotations - -import contextlib -import re -import sys -from typing import Any -from typing import Dict - -from sqlalchemy import exc as sa_exc -from sqlalchemy.engine import default -from sqlalchemy.testing.assertions import _expect_warnings -from sqlalchemy.testing.assertions import eq_ # noqa -from sqlalchemy.testing.assertions import is_ # noqa -from sqlalchemy.testing.assertions import is_false # noqa -from sqlalchemy.testing.assertions import is_not_ # noqa -from sqlalchemy.testing.assertions import is_true # noqa -from sqlalchemy.testing.assertions import ne_ # noqa -from sqlalchemy.util import decorator - -from ..util import sqla_compat - - -def _assert_proper_exception_context(exception): - """assert that any exception we're catching does not have a __context__ - without a __cause__, and that __suppress_context__ is never set. - - Python 3 will report nested as exceptions as "during the handling of - error X, error Y occurred". That's not what we want to do. we want - these exceptions in a cause chain. - - """ - - if ( - exception.__context__ is not exception.__cause__ - and not exception.__suppress_context__ - ): - assert False, ( - "Exception %r was correctly raised but did not set a cause, " - "within context %r as its cause." 
- % (exception, exception.__context__) - ) - - -def assert_raises(except_cls, callable_, *args, **kw): - return _assert_raises(except_cls, callable_, args, kw, check_context=True) - - -def assert_raises_context_ok(except_cls, callable_, *args, **kw): - return _assert_raises(except_cls, callable_, args, kw) - - -def assert_raises_message(except_cls, msg, callable_, *args, **kwargs): - return _assert_raises( - except_cls, callable_, args, kwargs, msg=msg, check_context=True - ) - - -def assert_raises_message_context_ok( - except_cls, msg, callable_, *args, **kwargs -): - return _assert_raises(except_cls, callable_, args, kwargs, msg=msg) - - -def _assert_raises( - except_cls, callable_, args, kwargs, msg=None, check_context=False -): - with _expect_raises(except_cls, msg, check_context) as ec: - callable_(*args, **kwargs) - return ec.error - - -class _ErrorContainer: - error: Any = None - - -@contextlib.contextmanager -def _expect_raises( - except_cls, msg=None, check_context=False, text_exact=False -): - ec = _ErrorContainer() - if check_context: - are_we_already_in_a_traceback = sys.exc_info()[0] - try: - yield ec - success = False - except except_cls as err: - ec.error = err - success = True - if msg is not None: - if text_exact: - assert str(err) == msg, f"{msg} != {err}" - else: - assert re.search(msg, str(err), re.UNICODE), f"{msg} !~ {err}" - if check_context and not are_we_already_in_a_traceback: - _assert_proper_exception_context(err) - print(str(err).encode("utf-8")) - - # assert outside the block so it works for AssertionError too ! - assert success, "Callable did not raise an exception" - - -def expect_raises(except_cls, check_context=True): - return _expect_raises(except_cls, check_context=check_context) - - -def expect_raises_message( - except_cls, msg, check_context=True, text_exact=False -): - return _expect_raises( - except_cls, msg=msg, check_context=check_context, text_exact=text_exact - ) - - -def eq_ignore_whitespace(a, b, msg=None): - a = re.sub(r"^\s+?|\n", "", a) - a = re.sub(r" {2,}", " ", a) - b = re.sub(r"^\s+?|\n", "", b) - b = re.sub(r" {2,}", " ", b) - - assert a == b, msg or "%r != %r" % (a, b) - - -_dialect_mods: Dict[Any, Any] = {} - - -def _get_dialect(name): - if name is None or name == "default": - return default.DefaultDialect() - else: - d = sqla_compat._create_url(name).get_dialect()() - - if name == "postgresql": - d.implicit_returning = True - elif name == "mssql": - d.legacy_schema_aliasing = False - return d - - -def expect_warnings(*messages, **kw): - """Context manager which expects one or more warnings. - - With no arguments, squelches all SAWarnings emitted via - sqlalchemy.util.warn and sqlalchemy.util.warn_limited. Otherwise - pass string expressions that will match selected warnings via regex; - all non-matching warnings are sent through. - - The expect version **asserts** that the warnings were in fact seen. - - Note that the test suite sets SAWarning warnings to raise exceptions. - - """ - return _expect_warnings(Warning, messages, **kw) - - -def emits_python_deprecation_warning(*messages): - """Decorator form of expect_warnings(). - - Note that emits_warning does **not** assert that the warnings - were in fact seen. 
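In a test these helpers read naturally; a small usage sketch of the context-manager form:

    from alembic.testing.assertions import expect_raises_message

    def test_zero_division():
        # The message is matched as a regex against str(err); the caught
        # error is exposed on the yielded container for further inspection.
        with expect_raises_message(ZeroDivisionError, "division by zero") as ec:
            1 / 0
        assert isinstance(ec.error, ZeroDivisionError)

Because `check_context` defaults to True, the helper also runs `_assert_proper_exception_context` on the caught error, enforcing the cause-chain discipline described at the top of this module.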
- - """ - - @decorator - def decorate(fn, *args, **kw): - with _expect_warnings(DeprecationWarning, assert_=False, *messages): - return fn(*args, **kw) - - return decorate - - -def expect_sqlalchemy_deprecated(*messages, **kw): - return _expect_warnings(sa_exc.SADeprecationWarning, messages, **kw) - - -def expect_sqlalchemy_deprecated_20(*messages, **kw): - return _expect_warnings(sa_exc.RemovedIn20Warning, messages, **kw) diff --git a/myenv/Lib/site-packages/alembic/testing/env.py b/myenv/Lib/site-packages/alembic/testing/env.py deleted file mode 100644 index 5df7ef8..0000000 --- a/myenv/Lib/site-packages/alembic/testing/env.py +++ /dev/null @@ -1,518 +0,0 @@ -import importlib.machinery -import os -import shutil -import textwrap - -from sqlalchemy.testing import config -from sqlalchemy.testing import provision - -from . import util as testing_util -from .. import command -from .. import script -from .. import util -from ..script import Script -from ..script import ScriptDirectory - - -def _get_staging_directory(): - if provision.FOLLOWER_IDENT: - return "scratch_%s" % provision.FOLLOWER_IDENT - else: - return "scratch" - - -def staging_env(create=True, template="generic", sourceless=False): - cfg = _testing_config() - if create: - path = os.path.join(_get_staging_directory(), "scripts") - assert not os.path.exists(path), ( - "staging directory %s already exists; poor cleanup?" % path - ) - - command.init(cfg, path, template=template) - if sourceless: - try: - # do an import so that a .pyc/.pyo is generated. - util.load_python_file(path, "env.py") - except AttributeError: - # we don't have the migration context set up yet - # so running the .env py throws this exception. - # theoretically we could be using py_compiler here to - # generate .pyc/.pyo without importing but not really - # worth it. 
- pass - assert sourceless in ( - "pep3147_envonly", - "simple", - "pep3147_everything", - ), sourceless - make_sourceless( - os.path.join(path, "env.py"), - "pep3147" if "pep3147" in sourceless else "simple", - ) - - sc = script.ScriptDirectory.from_config(cfg) - return sc - - -def clear_staging_env(): - from sqlalchemy.testing import engines - - engines.testing_reaper.close_all() - shutil.rmtree(_get_staging_directory(), True) - - -def script_file_fixture(txt): - dir_ = os.path.join(_get_staging_directory(), "scripts") - path = os.path.join(dir_, "script.py.mako") - with open(path, "w") as f: - f.write(txt) - - -def env_file_fixture(txt): - dir_ = os.path.join(_get_staging_directory(), "scripts") - txt = ( - """ -from alembic import context - -config = context.config -""" - + txt - ) - - path = os.path.join(dir_, "env.py") - pyc_path = util.pyc_file_from_path(path) - if pyc_path: - os.unlink(pyc_path) - - with open(path, "w") as f: - f.write(txt) - - -def _sqlite_file_db(tempname="foo.db", future=False, scope=None, **options): - dir_ = os.path.join(_get_staging_directory(), "scripts") - url = "sqlite:///%s/%s" % (dir_, tempname) - if scope and util.sqla_14: - options["scope"] = scope - return testing_util.testing_engine(url=url, future=future, options=options) - - -def _sqlite_testing_config(sourceless=False, future=False): - dir_ = os.path.join(_get_staging_directory(), "scripts") - url = "sqlite:///%s/foo.db" % dir_ - - sqlalchemy_future = future or ("future" in config.db.__class__.__module__) - - return _write_config_file( - """ -[alembic] -script_location = %s -sqlalchemy.url = %s -sourceless = %s -%s - -[loggers] -keys = root,sqlalchemy - -[handlers] -keys = console - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = DEBUG -handlers = -qualname = sqlalchemy.engine - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatters] -keys = generic - -[formatter_generic] -format = %%(levelname)-5.5s [%%(name)s] %%(message)s -datefmt = %%H:%%M:%%S - """ - % ( - dir_, - url, - "true" if sourceless else "false", - "sqlalchemy.future = true" if sqlalchemy_future else "", - ) - ) - - -def _multi_dir_testing_config(sourceless=False, extra_version_location=""): - dir_ = os.path.join(_get_staging_directory(), "scripts") - sqlalchemy_future = "future" in config.db.__class__.__module__ - - url = "sqlite:///%s/foo.db" % dir_ - - return _write_config_file( - """ -[alembic] -script_location = %s -sqlalchemy.url = %s -sqlalchemy.future = %s -sourceless = %s -version_locations = %%(here)s/model1/ %%(here)s/model2/ %%(here)s/model3/ %s - -[loggers] -keys = root - -[handlers] -keys = console - -[logger_root] -level = WARN -handlers = console -qualname = - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatters] -keys = generic - -[formatter_generic] -format = %%(levelname)-5.5s [%%(name)s] %%(message)s -datefmt = %%H:%%M:%%S - """ - % ( - dir_, - url, - "true" if sqlalchemy_future else "false", - "true" if sourceless else "false", - extra_version_location, - ) - ) - - -def _no_sql_testing_config(dialect="postgresql", directives=""): - """use a postgresql url with no host so that - connections guaranteed to fail""" - dir_ = os.path.join(_get_staging_directory(), "scripts") - return _write_config_file( - """ -[alembic] -script_location = %s -sqlalchemy.url = %s:// -%s - -[loggers] -keys = root - -[handlers] -keys = console - -[logger_root] -level = 
WARN -handlers = console -qualname = - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatters] -keys = generic - -[formatter_generic] -format = %%(levelname)-5.5s [%%(name)s] %%(message)s -datefmt = %%H:%%M:%%S - -""" - % (dir_, dialect, directives) - ) - - -def _write_config_file(text): - cfg = _testing_config() - with open(cfg.config_file_name, "w") as f: - f.write(text) - return cfg - - -def _testing_config(): - from alembic.config import Config - - if not os.access(_get_staging_directory(), os.F_OK): - os.mkdir(_get_staging_directory()) - return Config(os.path.join(_get_staging_directory(), "test_alembic.ini")) - - -def write_script( - scriptdir, rev_id, content, encoding="ascii", sourceless=False -): - old = scriptdir.revision_map.get_revision(rev_id) - path = old.path - - content = textwrap.dedent(content) - if encoding: - content = content.encode(encoding) - with open(path, "wb") as fp: - fp.write(content) - pyc_path = util.pyc_file_from_path(path) - if pyc_path: - os.unlink(pyc_path) - script = Script._from_path(scriptdir, path) - old = scriptdir.revision_map.get_revision(script.revision) - if old.down_revision != script.down_revision: - raise Exception( - "Can't change down_revision " "on a refresh operation." - ) - scriptdir.revision_map.add_revision(script, _replace=True) - - if sourceless: - make_sourceless( - path, "pep3147" if sourceless == "pep3147_everything" else "simple" - ) - - -def make_sourceless(path, style): - import py_compile - - py_compile.compile(path) - - if style == "simple": - pyc_path = util.pyc_file_from_path(path) - suffix = importlib.machinery.BYTECODE_SUFFIXES[0] - filepath, ext = os.path.splitext(path) - simple_pyc_path = filepath + suffix - shutil.move(pyc_path, simple_pyc_path) - pyc_path = simple_pyc_path - else: - assert style in ("pep3147", "simple") - pyc_path = util.pyc_file_from_path(path) - - assert os.access(pyc_path, os.F_OK) - - os.unlink(path) - - -def three_rev_fixture(cfg): - a = util.rev_id() - b = util.rev_id() - c = util.rev_id() - - script = ScriptDirectory.from_config(cfg) - script.generate_revision(a, "revision a", refresh=True, head="base") - write_script( - script, - a, - """\ -"Rev A" -revision = '%s' -down_revision = None - -from alembic import op - - -def upgrade(): - op.execute("CREATE STEP 1") - - -def downgrade(): - op.execute("DROP STEP 1") - -""" - % a, - ) - - script.generate_revision(b, "revision b", refresh=True, head=a) - write_script( - script, - b, - f"""# coding: utf-8 -"Rev B, méil, %3" -revision = '{b}' -down_revision = '{a}' - -from alembic import op - - -def upgrade(): - op.execute("CREATE STEP 2") - - -def downgrade(): - op.execute("DROP STEP 2") - -""", - encoding="utf-8", - ) - - script.generate_revision(c, "revision c", refresh=True, head=b) - write_script( - script, - c, - """\ -"Rev C" -revision = '%s' -down_revision = '%s' - -from alembic import op - - -def upgrade(): - op.execute("CREATE STEP 3") - - -def downgrade(): - op.execute("DROP STEP 3") - -""" - % (c, b), - ) - return a, b, c - - -def multi_heads_fixture(cfg, a, b, c): - """Create a multiple head fixture from the three-revs fixture""" - - # a->b->c - # -> d -> e - # -> f - d = util.rev_id() - e = util.rev_id() - f = util.rev_id() - - script = ScriptDirectory.from_config(cfg) - script.generate_revision( - d, "revision d from b", head=b, splice=True, refresh=True - ) - write_script( - script, - d, - """\ -"Rev D" -revision = '%s' -down_revision = '%s' - -from alembic import op - - -def 
upgrade(): - op.execute("CREATE STEP 4") - - -def downgrade(): - op.execute("DROP STEP 4") - -""" - % (d, b), - ) - - script.generate_revision( - e, "revision e from d", head=d, splice=True, refresh=True - ) - write_script( - script, - e, - """\ -"Rev E" -revision = '%s' -down_revision = '%s' - -from alembic import op - - -def upgrade(): - op.execute("CREATE STEP 5") - - -def downgrade(): - op.execute("DROP STEP 5") - -""" - % (e, d), - ) - - script.generate_revision( - f, "revision f from b", head=b, splice=True, refresh=True - ) - write_script( - script, - f, - """\ -"Rev F" -revision = '%s' -down_revision = '%s' - -from alembic import op - - -def upgrade(): - op.execute("CREATE STEP 6") - - -def downgrade(): - op.execute("DROP STEP 6") - -""" - % (f, b), - ) - - return d, e, f - - -def _multidb_testing_config(engines): - """alembic.ini fixture to work exactly with the 'multidb' template""" - - dir_ = os.path.join(_get_staging_directory(), "scripts") - - sqlalchemy_future = "future" in config.db.__class__.__module__ - - databases = ", ".join(engines.keys()) - engines = "\n\n".join( - "[%s]\n" "sqlalchemy.url = %s" % (key, value.url) - for key, value in engines.items() - ) - - return _write_config_file( - """ -[alembic] -script_location = %s -sourceless = false -sqlalchemy.future = %s -databases = %s - -%s -[loggers] -keys = root - -[handlers] -keys = console - -[logger_root] -level = WARN -handlers = console -qualname = - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatters] -keys = generic - -[formatter_generic] -format = %%(levelname)-5.5s [%%(name)s] %%(message)s -datefmt = %%H:%%M:%%S - """ - % (dir_, "true" if sqlalchemy_future else "false", databases, engines) - ) diff --git a/myenv/Lib/site-packages/alembic/testing/fixtures.py b/myenv/Lib/site-packages/alembic/testing/fixtures.py deleted file mode 100644 index 3b5ce59..0000000 --- a/myenv/Lib/site-packages/alembic/testing/fixtures.py +++ /dev/null @@ -1,318 +0,0 @@ -from __future__ import annotations - -import configparser -from contextlib import contextmanager -import io -import re -from typing import Any -from typing import Dict - -from sqlalchemy import Column -from sqlalchemy import inspect -from sqlalchemy import MetaData -from sqlalchemy import String -from sqlalchemy import Table -from sqlalchemy import testing -from sqlalchemy import text -from sqlalchemy.testing import config -from sqlalchemy.testing import mock -from sqlalchemy.testing.assertions import eq_ -from sqlalchemy.testing.fixtures import TablesTest as SQLAlchemyTablesTest -from sqlalchemy.testing.fixtures import TestBase as SQLAlchemyTestBase - -import alembic -from .assertions import _get_dialect -from ..environment import EnvironmentContext -from ..migration import MigrationContext -from ..operations import Operations -from ..util import sqla_compat -from ..util.sqla_compat import create_mock_engine -from ..util.sqla_compat import sqla_14 -from ..util.sqla_compat import sqla_2 - - -testing_config = configparser.ConfigParser() -testing_config.read(["test.cfg"]) - - -class TestBase(SQLAlchemyTestBase): - is_sqlalchemy_future = sqla_2 - - @testing.fixture() - def ops_context(self, migration_context): - with migration_context.begin_transaction(_per_migration=True): - yield Operations(migration_context) - - @testing.fixture - def migration_context(self, connection): - return MigrationContext.configure( - connection, opts=dict(transaction_per_migration=True) - ) - - @testing.fixture - def 
as_sql_migration_context(self, connection): - return MigrationContext.configure( - connection, opts=dict(transaction_per_migration=True, as_sql=True) - ) - - @testing.fixture - def connection(self): - with config.db.connect() as conn: - yield conn - - -class TablesTest(TestBase, SQLAlchemyTablesTest): - pass - - -if sqla_14: - from sqlalchemy.testing.fixtures import FutureEngineMixin -else: - - class FutureEngineMixin: # type:ignore[no-redef] - __requires__ = ("sqlalchemy_14",) - - -FutureEngineMixin.is_sqlalchemy_future = True - - -def capture_db(dialect="postgresql://"): - buf = [] - - def dump(sql, *multiparams, **params): - buf.append(str(sql.compile(dialect=engine.dialect))) - - engine = create_mock_engine(dialect, dump) - return engine, buf - - -_engs: Dict[Any, Any] = {} - - -@contextmanager -def capture_context_buffer(**kw): - if kw.pop("bytes_io", False): - buf = io.BytesIO() - else: - buf = io.StringIO() - - kw.update({"dialect_name": "sqlite", "output_buffer": buf}) - conf = EnvironmentContext.configure - - def configure(*arg, **opt): - opt.update(**kw) - return conf(*arg, **opt) - - with mock.patch.object(EnvironmentContext, "configure", configure): - yield buf - - -@contextmanager -def capture_engine_context_buffer(**kw): - from .env import _sqlite_file_db - from sqlalchemy import event - - buf = io.StringIO() - - eng = _sqlite_file_db() - - conn = eng.connect() - - @event.listens_for(conn, "before_cursor_execute") - def bce(conn, cursor, statement, parameters, context, executemany): - buf.write(statement + "\n") - - kw.update({"connection": conn}) - conf = EnvironmentContext.configure - - def configure(*arg, **opt): - opt.update(**kw) - return conf(*arg, **opt) - - with mock.patch.object(EnvironmentContext, "configure", configure): - yield buf - - -def op_fixture( - dialect="default", - as_sql=False, - naming_convention=None, - literal_binds=False, - native_boolean=None, -): - opts = {} - if naming_convention: - opts["target_metadata"] = MetaData(naming_convention=naming_convention) - - class buffer_: - def __init__(self): - self.lines = [] - - def write(self, msg): - msg = msg.strip() - msg = re.sub(r"[\n\t]", "", msg) - if as_sql: - # the impl produces soft tabs, - # so search for blocks of 4 spaces - msg = re.sub(r" ", "", msg) - msg = re.sub(r"\;\n*$", "", msg) - - self.lines.append(msg) - - def flush(self): - pass - - buf = buffer_() - - class ctx(MigrationContext): - def get_buf(self): - return buf - - def clear_assertions(self): - buf.lines[:] = [] - - def assert_(self, *sql): - # TODO: make this more flexible about - # whitespace and such - eq_(buf.lines, [re.sub(r"[\n\t]", "", s) for s in sql]) - - def assert_contains(self, sql): - for stmt in buf.lines: - if re.sub(r"[\n\t]", "", sql) in stmt: - return - else: - assert False, "Could not locate fragment %r in %r" % ( - sql, - buf.lines, - ) - - if as_sql: - opts["as_sql"] = as_sql - if literal_binds: - opts["literal_binds"] = literal_binds - if not sqla_14 and dialect == "mariadb": - ctx_dialect = _get_dialect("mysql") - ctx_dialect.server_version_info = (10, 4, 0, "MariaDB") - - else: - ctx_dialect = _get_dialect(dialect) - if native_boolean is not None: - ctx_dialect.supports_native_boolean = native_boolean - # this is new as of SQLAlchemy 1.2.7 and is used by SQL Server, - # which breaks assumptions in the alembic test suite - ctx_dialect.non_native_boolean_check_constraint = True - if not as_sql: - - def execute(stmt, *multiparam, **param): - if isinstance(stmt, str): - stmt = text(stmt) - assert 
stmt.supports_execution - sql = str(stmt.compile(dialect=ctx_dialect)) - - buf.write(sql) - - connection = mock.Mock(dialect=ctx_dialect, execute=execute) - else: - opts["output_buffer"] = buf - connection = None - context = ctx(ctx_dialect, connection, opts) - - alembic.op._proxy = Operations(context) - return context - - -class AlterColRoundTripFixture: - # since these tests are about syntax, use more recent SQLAlchemy as some of - # the type / server default compare logic might not work on older - # SQLAlchemy versions as seems to be the case for SQLAlchemy 1.1 on Oracle - - __requires__ = ("alter_column",) - - def setUp(self): - self.conn = config.db.connect() - self.ctx = MigrationContext.configure(self.conn) - self.op = Operations(self.ctx) - self.metadata = MetaData() - - def _compare_type(self, t1, t2): - c1 = Column("q", t1) - c2 = Column("q", t2) - assert not self.ctx.impl.compare_type( - c1, c2 - ), "Type objects %r and %r didn't compare as equivalent" % (t1, t2) - - def _compare_server_default(self, t1, s1, t2, s2): - c1 = Column("q", t1, server_default=s1) - c2 = Column("q", t2, server_default=s2) - assert not self.ctx.impl.compare_server_default( - c1, c2, s2, s1 - ), "server defaults %r and %r didn't compare as equivalent" % (s1, s2) - - def tearDown(self): - sqla_compat._safe_rollback_connection_transaction(self.conn) - with self.conn.begin(): - self.metadata.drop_all(self.conn) - self.conn.close() - - def _run_alter_col(self, from_, to_, compare=None): - column = Column( - from_.get("name", "colname"), - from_.get("type", String(10)), - nullable=from_.get("nullable", True), - server_default=from_.get("server_default", None), - # comment=from_.get("comment", None) - ) - t = Table("x", self.metadata, column) - - with sqla_compat._ensure_scope_for_ddl(self.conn): - t.create(self.conn) - insp = inspect(self.conn) - old_col = insp.get_columns("x")[0] - - # TODO: conditional comment support - self.op.alter_column( - "x", - column.name, - existing_type=column.type, - existing_server_default=( - column.server_default - if column.server_default is not None - else False - ), - existing_nullable=True if column.nullable else False, - # existing_comment=column.comment, - nullable=to_.get("nullable", None), - # modify_comment=False, - server_default=to_.get("server_default", False), - new_column_name=to_.get("name", None), - type_=to_.get("type", None), - ) - - insp = inspect(self.conn) - new_col = insp.get_columns("x")[0] - - if compare is None: - compare = to_ - - eq_( - new_col["name"], - compare["name"] if "name" in compare else column.name, - ) - self._compare_type( - new_col["type"], compare.get("type", old_col["type"]) - ) - eq_(new_col["nullable"], compare.get("nullable", column.nullable)) - self._compare_server_default( - new_col["type"], - new_col.get("default", None), - compare.get("type", old_col["type"]), - ( - compare["server_default"].text - if "server_default" in compare - else ( - column.server_default.arg.text - if column.server_default is not None - else None - ) - ), - ) diff --git a/myenv/Lib/site-packages/alembic/testing/plugin/__init__.py b/myenv/Lib/site-packages/alembic/testing/plugin/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/alembic/testing/plugin/bootstrap.py b/myenv/Lib/site-packages/alembic/testing/plugin/bootstrap.py deleted file mode 100644 index d4a2c55..0000000 --- a/myenv/Lib/site-packages/alembic/testing/plugin/bootstrap.py +++ /dev/null @@ -1,4 +0,0 @@ -""" -Bootstrapper for test framework 
plugins. - -""" diff --git a/myenv/Lib/site-packages/alembic/testing/requirements.py b/myenv/Lib/site-packages/alembic/testing/requirements.py deleted file mode 100644 index 6e07e28..0000000 --- a/myenv/Lib/site-packages/alembic/testing/requirements.py +++ /dev/null @@ -1,210 +0,0 @@ -from sqlalchemy.testing.requirements import Requirements - -from alembic import util -from alembic.util import sqla_compat -from ..testing import exclusions - - -class SuiteRequirements(Requirements): - @property - def schemas(self): - """Target database must support external schemas, and have one - named 'test_schema'.""" - - return exclusions.open() - - @property - def autocommit_isolation(self): - """target database should support 'AUTOCOMMIT' isolation level""" - - return exclusions.closed() - - @property - def materialized_views(self): - """needed for sqlalchemy compat""" - return exclusions.closed() - - @property - def unique_constraint_reflection(self): - def doesnt_have_check_uq_constraints(config): - from sqlalchemy import inspect - - insp = inspect(config.db) - try: - insp.get_unique_constraints("x") - except NotImplementedError: - return True - except TypeError: - return True - except Exception: - pass - return False - - return exclusions.skip_if(doesnt_have_check_uq_constraints) - - @property - def sequences(self): - """Target database must support SEQUENCEs.""" - - return exclusions.only_if( - [lambda config: config.db.dialect.supports_sequences], - "no sequence support", - ) - - @property - def foreign_key_match(self): - return exclusions.open() - - @property - def foreign_key_constraint_reflection(self): - return exclusions.open() - - @property - def check_constraints_w_enforcement(self): - """Target database must support check constraints - and also enforce them.""" - - return exclusions.open() - - @property - def reflects_pk_names(self): - return exclusions.closed() - - @property - def reflects_fk_options(self): - return exclusions.closed() - - @property - def sqlalchemy_14(self): - return exclusions.skip_if( - lambda config: not util.sqla_14, - "SQLAlchemy 1.4 or greater required", - ) - - @property - def sqlalchemy_1x(self): - return exclusions.skip_if( - lambda config: util.sqla_2, - "SQLAlchemy 1.x test", - ) - - @property - def sqlalchemy_2(self): - return exclusions.skip_if( - lambda config: not util.sqla_2, - "SQLAlchemy 2.x test", - ) - - @property - def asyncio(self): - def go(config): - try: - import greenlet # noqa: F401 - except ImportError: - return False - else: - return True - - return self.sqlalchemy_14 + exclusions.only_if(go) - - @property - def comments(self): - return exclusions.only_if( - lambda config: config.db.dialect.supports_comments - ) - - @property - def alter_column(self): - return exclusions.open() - - @property - def computed_columns(self): - return exclusions.closed() - - @property - def computed_columns_api(self): - return exclusions.only_if( - exclusions.BooleanPredicate(sqla_compat.has_computed) - ) - - @property - def computed_reflects_normally(self): - return exclusions.only_if( - exclusions.BooleanPredicate(sqla_compat.has_computed_reflection) - ) - - @property - def computed_reflects_as_server_default(self): - return exclusions.closed() - - @property - def computed_doesnt_reflect_as_server_default(self): - return exclusions.closed() - - @property - def autoincrement_on_composite_pk(self): - return exclusions.closed() - - @property - def fk_ondelete_is_reflected(self): - return exclusions.closed() - - @property - def fk_onupdate_is_reflected(self): - 
return exclusions.closed() - - @property - def fk_onupdate(self): - return exclusions.open() - - @property - def fk_ondelete_restrict(self): - return exclusions.open() - - @property - def fk_onupdate_restrict(self): - return exclusions.open() - - @property - def fk_ondelete_noaction(self): - return exclusions.open() - - @property - def fk_initially(self): - return exclusions.closed() - - @property - def fk_deferrable(self): - return exclusions.closed() - - @property - def fk_deferrable_is_reflected(self): - return exclusions.closed() - - @property - def fk_names(self): - return exclusions.open() - - @property - def integer_subtype_comparisons(self): - return exclusions.open() - - @property - def no_name_normalize(self): - return exclusions.skip_if( - lambda config: config.db.dialect.requires_name_normalize - ) - - @property - def identity_columns(self): - return exclusions.closed() - - @property - def identity_columns_alter(self): - return exclusions.closed() - - @property - def identity_columns_api(self): - return exclusions.only_if( - exclusions.BooleanPredicate(sqla_compat.has_identity) - ) diff --git a/myenv/Lib/site-packages/alembic/testing/schemacompare.py b/myenv/Lib/site-packages/alembic/testing/schemacompare.py deleted file mode 100644 index 204cc4d..0000000 --- a/myenv/Lib/site-packages/alembic/testing/schemacompare.py +++ /dev/null @@ -1,169 +0,0 @@ -from itertools import zip_longest - -from sqlalchemy import schema -from sqlalchemy.sql.elements import ClauseList - - -class CompareTable: - def __init__(self, table): - self.table = table - - def __eq__(self, other): - if self.table.name != other.name or self.table.schema != other.schema: - return False - - for c1, c2 in zip_longest(self.table.c, other.c): - if (c1 is None and c2 is not None) or ( - c2 is None and c1 is not None - ): - return False - if CompareColumn(c1) != c2: - return False - - return True - - # TODO: compare constraints, indexes - - def __ne__(self, other): - return not self.__eq__(other) - - -class CompareColumn: - def __init__(self, column): - self.column = column - - def __eq__(self, other): - return ( - self.column.name == other.name - and self.column.nullable == other.nullable - ) - # TODO: datatypes etc - - def __ne__(self, other): - return not self.__eq__(other) - - -class CompareIndex: - def __init__(self, index, name_only=False): - self.index = index - self.name_only = name_only - - def __eq__(self, other): - if self.name_only: - return self.index.name == other.name - else: - return ( - str(schema.CreateIndex(self.index)) - == str(schema.CreateIndex(other)) - and self.index.dialect_kwargs == other.dialect_kwargs - ) - - def __ne__(self, other): - return not self.__eq__(other) - - def __repr__(self): - expr = ClauseList(*self.index.expressions) - try: - expr_str = expr.compile().string - except Exception: - expr_str = str(expr) - return f"<CompareIndex {self.index.name}, expr: {expr_str}>" - - -class CompareCheckConstraint: - def __init__(self, constraint): - self.constraint = constraint - - def __eq__(self, other): - return ( - isinstance(other, schema.CheckConstraint) - and self.constraint.name == other.name - and (str(self.constraint.sqltext) == str(other.sqltext)) - and (other.table.name == self.constraint.table.name) - and other.table.schema == self.constraint.table.schema - ) - - def __ne__(self, other): - return not self.__eq__(other) - - -class CompareForeignKey: - def __init__(self, constraint): - self.constraint = constraint - - def __eq__(self, other): - r1 = ( - isinstance(other, schema.ForeignKeyConstraint) - and self.constraint.name == 
other.name - and (other.table.name == self.constraint.table.name) - and other.table.schema == self.constraint.table.schema - ) - if not r1: - return False - for c1, c2 in zip_longest(self.constraint.columns, other.columns): - if (c1 is None and c2 is not None) or ( - c2 is None and c1 is not None - ): - return False - if CompareColumn(c1) != c2: - return False - return True - - def __ne__(self, other): - return not self.__eq__(other) - - -class ComparePrimaryKey: - def __init__(self, constraint): - self.constraint = constraint - - def __eq__(self, other): - r1 = ( - isinstance(other, schema.PrimaryKeyConstraint) - and self.constraint.name == other.name - and (other.table.name == self.constraint.table.name) - and other.table.schema == self.constraint.table.schema - ) - if not r1: - return False - - for c1, c2 in zip_longest(self.constraint.columns, other.columns): - if (c1 is None and c2 is not None) or ( - c2 is None and c1 is not None - ): - return False - if CompareColumn(c1) != c2: - return False - - return True - - def __ne__(self, other): - return not self.__eq__(other) - - -class CompareUniqueConstraint: - def __init__(self, constraint): - self.constraint = constraint - - def __eq__(self, other): - r1 = ( - isinstance(other, schema.UniqueConstraint) - and self.constraint.name == other.name - and (other.table.name == self.constraint.table.name) - and other.table.schema == self.constraint.table.schema - ) - if not r1: - return False - - for c1, c2 in zip_longest(self.constraint.columns, other.columns): - if (c1 is None and c2 is not None) or ( - c2 is None and c1 is not None - ): - return False - if CompareColumn(c1) != c2: - return False - - return True - - def __ne__(self, other): - return not self.__eq__(other) diff --git a/myenv/Lib/site-packages/alembic/testing/suite/__init__.py b/myenv/Lib/site-packages/alembic/testing/suite/__init__.py deleted file mode 100644 index 3da498d..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from .test_autogen_comments import * # noqa -from .test_autogen_computed import * # noqa -from .test_autogen_diffs import * # noqa -from .test_autogen_fks import * # noqa -from .test_autogen_identity import * # noqa -from .test_environment import * # noqa -from .test_op import * # noqa diff --git a/myenv/Lib/site-packages/alembic/testing/suite/_autogen_fixtures.py b/myenv/Lib/site-packages/alembic/testing/suite/_autogen_fixtures.py deleted file mode 100644 index d838ebe..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/_autogen_fixtures.py +++ /dev/null @@ -1,335 +0,0 @@ -from __future__ import annotations - -from typing import Any -from typing import Dict -from typing import Set - -from sqlalchemy import CHAR -from sqlalchemy import CheckConstraint -from sqlalchemy import Column -from sqlalchemy import event -from sqlalchemy import ForeignKey -from sqlalchemy import Index -from sqlalchemy import inspect -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy import Numeric -from sqlalchemy import String -from sqlalchemy import Table -from sqlalchemy import Text -from sqlalchemy import text -from sqlalchemy import UniqueConstraint - -from ... import autogenerate -from ... 
import util -from ...autogenerate import api -from ...ddl.base import _fk_spec -from ...migration import MigrationContext -from ...operations import ops -from ...testing import config -from ...testing import eq_ -from ...testing.env import clear_staging_env -from ...testing.env import staging_env - -names_in_this_test: Set[Any] = set() - - -@event.listens_for(Table, "after_parent_attach") -def new_table(table, parent): - names_in_this_test.add(table.name) - - -def _default_include_object(obj, name, type_, reflected, compare_to): - if type_ == "table": - return name in names_in_this_test - else: - return True - - -_default_object_filters: Any = _default_include_object - -_default_name_filters: Any = None - - -class ModelOne: - __requires__ = ("unique_constraint_reflection",) - - schema: Any = None - - @classmethod - def _get_db_schema(cls): - schema = cls.schema - - m = MetaData(schema=schema) - - Table( - "user", - m, - Column("id", Integer, primary_key=True), - Column("name", String(50)), - Column("a1", Text), - Column("pw", String(50)), - Index("pw_idx", "pw"), - ) - - Table( - "address", - m, - Column("id", Integer, primary_key=True), - Column("email_address", String(100), nullable=False), - ) - - Table( - "order", - m, - Column("order_id", Integer, primary_key=True), - Column( - "amount", - Numeric(8, 2), - nullable=False, - server_default=text("0"), - ), - CheckConstraint("amount >= 0", name="ck_order_amount"), - ) - - Table( - "extra", - m, - Column("x", CHAR), - Column("uid", Integer, ForeignKey("user.id")), - ) - - return m - - @classmethod - def _get_model_schema(cls): - schema = cls.schema - - m = MetaData(schema=schema) - - Table( - "user", - m, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", Text, server_default="x"), - ) - - Table( - "address", - m, - Column("id", Integer, primary_key=True), - Column("email_address", String(100), nullable=False), - Column("street", String(50)), - UniqueConstraint("email_address", name="uq_email"), - ) - - Table( - "order", - m, - Column("order_id", Integer, primary_key=True), - Column( - "amount", - Numeric(10, 2), - nullable=True, - server_default=text("0"), - ), - Column("user_id", Integer, ForeignKey("user.id")), - CheckConstraint("amount > -1", name="ck_order_amount"), - ) - - Table( - "item", - m, - Column("id", Integer, primary_key=True), - Column("description", String(100)), - Column("order_id", Integer, ForeignKey("order.order_id")), - CheckConstraint("len(description) > 5"), - ) - return m - - -class _ComparesFKs: - def _assert_fk_diff( - self, - diff, - type_, - source_table, - source_columns, - target_table, - target_columns, - name=None, - conditional_name=None, - source_schema=None, - onupdate=None, - ondelete=None, - initially=None, - deferrable=None, - ): - # the public API for ForeignKeyConstraint was not very rich - # in 0.7, 0.8, so here we use the well-known but slightly - # private API to get at its elements - ( - fk_source_schema, - fk_source_table, - fk_source_columns, - fk_target_schema, - fk_target_table, - fk_target_columns, - fk_onupdate, - fk_ondelete, - fk_deferrable, - fk_initially, - ) = _fk_spec(diff[1]) - - eq_(diff[0], type_) - eq_(fk_source_table, source_table) - eq_(fk_source_columns, source_columns) - eq_(fk_target_table, target_table) - eq_(fk_source_schema, source_schema) - eq_(fk_onupdate, onupdate) - eq_(fk_ondelete, ondelete) - eq_(fk_initially, initially) - eq_(fk_deferrable, deferrable) - - eq_([elem.column.name for elem in diff[1].elements], 
target_columns) - if conditional_name is not None: - if conditional_name == "servergenerated": - fks = inspect(self.bind).get_foreign_keys(source_table) - server_fk_name = fks[0]["name"] - eq_(diff[1].name, server_fk_name) - else: - eq_(diff[1].name, conditional_name) - else: - eq_(diff[1].name, name) - - -class AutogenTest(_ComparesFKs): - def _flatten_diffs(self, diffs): - for d in diffs: - if isinstance(d, list): - yield from self._flatten_diffs(d) - else: - yield d - - @classmethod - def _get_bind(cls): - return config.db - - configure_opts: Dict[Any, Any] = {} - - @classmethod - def setup_class(cls): - staging_env() - cls.bind = cls._get_bind() - cls.m1 = cls._get_db_schema() - cls.m1.create_all(cls.bind) - cls.m2 = cls._get_model_schema() - - @classmethod - def teardown_class(cls): - cls.m1.drop_all(cls.bind) - clear_staging_env() - - def setUp(self): - self.conn = conn = self.bind.connect() - ctx_opts = { - "compare_type": True, - "compare_server_default": True, - "target_metadata": self.m2, - "upgrade_token": "upgrades", - "downgrade_token": "downgrades", - "alembic_module_prefix": "op.", - "sqlalchemy_module_prefix": "sa.", - "include_object": _default_object_filters, - "include_name": _default_name_filters, - } - if self.configure_opts: - ctx_opts.update(self.configure_opts) - self.context = context = MigrationContext.configure( - connection=conn, opts=ctx_opts - ) - - self.autogen_context = api.AutogenContext(context, self.m2) - - def tearDown(self): - self.conn.close() - - def _update_context( - self, object_filters=None, name_filters=None, include_schemas=None - ): - if include_schemas is not None: - self.autogen_context.opts["include_schemas"] = include_schemas - if object_filters is not None: - self.autogen_context._object_filters = [object_filters] - if name_filters is not None: - self.autogen_context._name_filters = [name_filters] - return self.autogen_context - - -class AutogenFixtureTest(_ComparesFKs): - def _fixture( - self, - m1, - m2, - include_schemas=False, - opts=None, - object_filters=_default_object_filters, - name_filters=_default_name_filters, - return_ops=False, - max_identifier_length=None, - ): - if max_identifier_length: - dialect = self.bind.dialect - existing_length = dialect.max_identifier_length - dialect.max_identifier_length = ( - dialect._user_defined_max_identifier_length - ) = max_identifier_length - try: - self._alembic_metadata, model_metadata = m1, m2 - for m in util.to_list(self._alembic_metadata): - m.create_all(self.bind) - - with self.bind.connect() as conn: - ctx_opts = { - "compare_type": True, - "compare_server_default": True, - "target_metadata": model_metadata, - "upgrade_token": "upgrades", - "downgrade_token": "downgrades", - "alembic_module_prefix": "op.", - "sqlalchemy_module_prefix": "sa.", - "include_object": object_filters, - "include_name": name_filters, - "include_schemas": include_schemas, - } - if opts: - ctx_opts.update(opts) - self.context = context = MigrationContext.configure( - connection=conn, opts=ctx_opts - ) - - autogen_context = api.AutogenContext(context, model_metadata) - uo = ops.UpgradeOps(ops=[]) - autogenerate._produce_net_changes(autogen_context, uo) - - if return_ops: - return uo - else: - return uo.as_diffs() - finally: - if max_identifier_length: - dialect = self.bind.dialect - dialect.max_identifier_length = ( - dialect._user_defined_max_identifier_length - ) = existing_length - - def setUp(self): - staging_env() - self.bind = config.db - - def tearDown(self): - if hasattr(self, "_alembic_metadata"): - 
for m in util.to_list(self._alembic_metadata): - m.drop_all(self.bind) - clear_staging_env() diff --git a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_comments.py b/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_comments.py deleted file mode 100644 index 7ef074f..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_comments.py +++ /dev/null @@ -1,242 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy import Float -from sqlalchemy import MetaData -from sqlalchemy import String -from sqlalchemy import Table - -from ._autogen_fixtures import AutogenFixtureTest -from ...testing import eq_ -from ...testing import mock -from ...testing import TestBase - - -class AutogenerateCommentsTest(AutogenFixtureTest, TestBase): - __backend__ = True - - __requires__ = ("comments",) - - def test_existing_table_comment_no_change(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - comment="this is some table", - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - comment="this is some table", - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs, []) - - def test_add_table_comment(self): - m1 = MetaData() - m2 = MetaData() - - Table("some_table", m1, Column("test", String(10), primary_key=True)) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - comment="this is some table", - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0], "add_table_comment") - eq_(diffs[0][1].comment, "this is some table") - eq_(diffs[0][2], None) - - def test_remove_table_comment(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - comment="this is some table", - ) - - Table("some_table", m2, Column("test", String(10), primary_key=True)) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0], "remove_table_comment") - eq_(diffs[0][1].comment, None) - - def test_alter_table_comment(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - comment="this is some table", - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - comment="this is also some table", - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0], "add_table_comment") - eq_(diffs[0][1].comment, "this is also some table") - eq_(diffs[0][2], "this is some table") - - def test_existing_column_comment_no_change(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - Column("amount", Float, comment="the amount"), - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - Column("amount", Float, comment="the amount"), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs, []) - - def test_add_column_comment(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - Column("amount", Float), - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - Column("amount", Float, comment="the amount"), - ) - - diffs = self._fixture(m1, m2) - eq_( - diffs, - [ - [ - ( - "modify_comment", - None, - "some_table", - "amount", - { - "existing_nullable": True, - "existing_type": mock.ANY, - "existing_server_default": False, - }, - None, - "the amount", - ) - ] - ], - ) - - def test_remove_column_comment(self): - m1 = MetaData() - m2 = MetaData() 
- - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - Column("amount", Float, comment="the amount"), - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - Column("amount", Float), - ) - - diffs = self._fixture(m1, m2) - eq_( - diffs, - [ - [ - ( - "modify_comment", - None, - "some_table", - "amount", - { - "existing_nullable": True, - "existing_type": mock.ANY, - "existing_server_default": False, - }, - "the amount", - None, - ) - ] - ], - ) - - def test_alter_column_comment(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - Column("amount", Float, comment="the amount"), - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - Column("amount", Float, comment="the adjusted amount"), - ) - - diffs = self._fixture(m1, m2) - - eq_( - diffs, - [ - [ - ( - "modify_comment", - None, - "some_table", - "amount", - { - "existing_nullable": True, - "existing_type": mock.ANY, - "existing_server_default": False, - }, - "the amount", - "the adjusted amount", - ) - ] - ], - ) diff --git a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_computed.py b/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_computed.py deleted file mode 100644 index 04a3caf..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_computed.py +++ /dev/null @@ -1,204 +0,0 @@ -import sqlalchemy as sa -from sqlalchemy import Column -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy import Table - -from ._autogen_fixtures import AutogenFixtureTest -from ... import testing -from ...testing import config -from ...testing import eq_ -from ...testing import exclusions -from ...testing import is_ -from ...testing import is_true -from ...testing import mock -from ...testing import TestBase - - -class AutogenerateComputedTest(AutogenFixtureTest, TestBase): - __requires__ = ("computed_columns",) - __backend__ = True - - def test_add_computed_column(self): - m1 = MetaData() - m2 = MetaData() - - Table("user", m1, Column("id", Integer, primary_key=True)) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("foo", Integer, sa.Computed("5")), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0], "add_column") - eq_(diffs[0][2], "user") - eq_(diffs[0][3].name, "foo") - c = diffs[0][3].computed - - is_true(isinstance(c, sa.Computed)) - is_(c.persisted, None) - eq_(str(c.sqltext), "5") - - def test_remove_computed_column(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("foo", Integer, sa.Computed("5")), - ) - - Table("user", m2, Column("id", Integer, primary_key=True)) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0], "remove_column") - eq_(diffs[0][2], "user") - c = diffs[0][3] - eq_(c.name, "foo") - - if config.requirements.computed_reflects_normally.enabled: - is_true(isinstance(c.computed, sa.Computed)) - else: - is_(c.computed, None) - - if config.requirements.computed_reflects_as_server_default.enabled: - is_true(isinstance(c.server_default, sa.DefaultClause)) - eq_(str(c.server_default.arg.text), "5") - elif config.requirements.computed_reflects_normally.enabled: - is_true(isinstance(c.computed, sa.Computed)) - else: - is_(c.computed, None) - - @testing.combinations( - lambda: (None, sa.Computed("bar*5")), - (lambda: (sa.Computed("bar*5"), None)), - lambda: ( - sa.Computed("bar*5"), - sa.Computed("bar * 
42", persisted=True), - ), - lambda: (sa.Computed("bar*5"), sa.Computed("bar * 42")), - ) - @config.requirements.computed_reflects_normally - def test_cant_change_computed_warning(self, test_case): - arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) - m1 = MetaData() - m2 = MetaData() - - arg_before = [] if arg_before is None else [arg_before] - arg_after = [] if arg_after is None else [arg_after] - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer, *arg_before), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer, *arg_after), - ) - - with mock.patch("alembic.util.warn") as mock_warn: - diffs = self._fixture(m1, m2) - - eq_( - mock_warn.mock_calls, - [mock.call("Computed default on user.foo cannot be modified")], - ) - - eq_(list(diffs), []) - - @testing.combinations( - lambda: (None, None), - lambda: (sa.Computed("5"), sa.Computed("5")), - lambda: (sa.Computed("bar*5"), sa.Computed("bar*5")), - lambda: (sa.Computed("bar*5"), sa.Computed("bar * \r\n\t5")), - ( - lambda: (sa.Computed("bar*5"), None), - config.requirements.computed_doesnt_reflect_as_server_default, - ), - ) - def test_computed_unchanged(self, test_case): - arg_before, arg_after = testing.resolve_lambda(test_case, **locals()) - m1 = MetaData() - m2 = MetaData() - - arg_before = [] if arg_before is None else [arg_before] - arg_after = [] if arg_after is None else [arg_after] - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer, *arg_before), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer, *arg_after), - ) - - with mock.patch("alembic.util.warn") as mock_warn: - diffs = self._fixture(m1, m2) - eq_(mock_warn.mock_calls, []) - - eq_(list(diffs), []) - - @config.requirements.computed_reflects_as_server_default - def test_remove_computed_default_on_computed(self): - """Asserts the current behavior which is that on PG and Oracle, - the GENERATED ALWAYS AS is reflected as a server default which we can't - tell is actually "computed", so these come out as a modification to - the server default. 
- - """ - m1 = MetaData() - m2 = MetaData() - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer, sa.Computed("bar + 42")), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("bar", Integer), - Column("foo", Integer), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0][0], "modify_default") - eq_(diffs[0][0][2], "user") - eq_(diffs[0][0][3], "foo") - old = diffs[0][0][-2] - new = diffs[0][0][-1] - - is_(new, None) - is_true(isinstance(old, sa.DefaultClause)) - - if exclusions.against(config, "postgresql"): - eq_(str(old.arg.text), "(bar + 42)") - elif exclusions.against(config, "oracle"): - eq_(str(old.arg.text), '"BAR"+42') diff --git a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_diffs.py b/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_diffs.py deleted file mode 100644 index 75bcd37..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_diffs.py +++ /dev/null @@ -1,273 +0,0 @@ -from sqlalchemy import BigInteger -from sqlalchemy import Column -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy import Table -from sqlalchemy.testing import in_ - -from ._autogen_fixtures import AutogenFixtureTest -from ... import testing -from ...testing import config -from ...testing import eq_ -from ...testing import is_ -from ...testing import TestBase - - -class AlterColumnTest(AutogenFixtureTest, TestBase): - __backend__ = True - - @testing.combinations((True,), (False,)) - @config.requirements.comments - def test_all_existings_filled(self, pk): - m1 = MetaData() - m2 = MetaData() - - Table("a", m1, Column("x", Integer, primary_key=pk)) - Table("a", m2, Column("x", Integer, comment="x", primary_key=pk)) - - alter_col = self._assert_alter_col(m1, m2, pk) - eq_(alter_col.modify_comment, "x") - - @testing.combinations((True,), (False,)) - @config.requirements.comments - def test_all_existings_filled_in_notnull(self, pk): - m1 = MetaData() - m2 = MetaData() - - Table("a", m1, Column("x", Integer, nullable=False, primary_key=pk)) - Table( - "a", - m2, - Column("x", Integer, nullable=False, comment="x", primary_key=pk), - ) - - self._assert_alter_col(m1, m2, pk, nullable=False) - - @testing.combinations((True,), (False,)) - @config.requirements.comments - def test_all_existings_filled_in_comment(self, pk): - m1 = MetaData() - m2 = MetaData() - - Table("a", m1, Column("x", Integer, comment="old", primary_key=pk)) - Table("a", m2, Column("x", Integer, comment="new", primary_key=pk)) - - alter_col = self._assert_alter_col(m1, m2, pk) - eq_(alter_col.existing_comment, "old") - - @testing.combinations((True,), (False,)) - @config.requirements.comments - def test_all_existings_filled_in_server_default(self, pk): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", m1, Column("x", Integer, server_default="5", primary_key=pk) - ) - Table( - "a", - m2, - Column( - "x", Integer, server_default="5", comment="new", primary_key=pk - ), - ) - - alter_col = self._assert_alter_col(m1, m2, pk) - in_("5", alter_col.existing_server_default.arg.text) - - def _assert_alter_col(self, m1, m2, pk, nullable=None): - ops = self._fixture(m1, m2, return_ops=True) - modify_table = ops.ops[-1] - alter_col = modify_table.ops[0] - - if nullable is None: - eq_(alter_col.existing_nullable, not pk) - else: - eq_(alter_col.existing_nullable, nullable) - assert alter_col.existing_type._compare_type_affinity(Integer()) - return alter_col - - -class 
AutoincrementTest(AutogenFixtureTest, TestBase): - __backend__ = True - __requires__ = ("integer_subtype_comparisons",) - - def test_alter_column_autoincrement_none(self): - m1 = MetaData() - m2 = MetaData() - - Table("a", m1, Column("x", Integer, nullable=False)) - Table("a", m2, Column("x", Integer, nullable=True)) - - ops = self._fixture(m1, m2, return_ops=True) - assert "autoincrement" not in ops.ops[0].ops[0].kw - - def test_alter_column_autoincrement_pk_false(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", - m1, - Column("x", Integer, primary_key=True, autoincrement=False), - ) - Table( - "a", - m2, - Column("x", BigInteger, primary_key=True, autoincrement=False), - ) - - ops = self._fixture(m1, m2, return_ops=True) - is_(ops.ops[0].ops[0].kw["autoincrement"], False) - - def test_alter_column_autoincrement_pk_implicit_true(self): - m1 = MetaData() - m2 = MetaData() - - Table("a", m1, Column("x", Integer, primary_key=True)) - Table("a", m2, Column("x", BigInteger, primary_key=True)) - - ops = self._fixture(m1, m2, return_ops=True) - is_(ops.ops[0].ops[0].kw["autoincrement"], True) - - def test_alter_column_autoincrement_pk_explicit_true(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", m1, Column("x", Integer, primary_key=True, autoincrement=True) - ) - Table( - "a", - m2, - Column("x", BigInteger, primary_key=True, autoincrement=True), - ) - - ops = self._fixture(m1, m2, return_ops=True) - is_(ops.ops[0].ops[0].kw["autoincrement"], True) - - def test_alter_column_autoincrement_nonpk_false(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", - m1, - Column("id", Integer, primary_key=True), - Column("x", Integer, autoincrement=False), - ) - Table( - "a", - m2, - Column("id", Integer, primary_key=True), - Column("x", BigInteger, autoincrement=False), - ) - - ops = self._fixture(m1, m2, return_ops=True) - is_(ops.ops[0].ops[0].kw["autoincrement"], False) - - def test_alter_column_autoincrement_nonpk_implicit_false(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", - m1, - Column("id", Integer, primary_key=True), - Column("x", Integer), - ) - Table( - "a", - m2, - Column("id", Integer, primary_key=True), - Column("x", BigInteger), - ) - - ops = self._fixture(m1, m2, return_ops=True) - assert "autoincrement" not in ops.ops[0].ops[0].kw - - def test_alter_column_autoincrement_nonpk_explicit_true(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", - m1, - Column("id", Integer, primary_key=True, autoincrement=False), - Column("x", Integer, autoincrement=True), - ) - Table( - "a", - m2, - Column("id", Integer, primary_key=True, autoincrement=False), - Column("x", BigInteger, autoincrement=True), - ) - - ops = self._fixture(m1, m2, return_ops=True) - is_(ops.ops[0].ops[0].kw["autoincrement"], True) - - def test_alter_column_autoincrement_compositepk_false(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", - m1, - Column("id", Integer, primary_key=True), - Column("x", Integer, primary_key=True, autoincrement=False), - ) - Table( - "a", - m2, - Column("id", Integer, primary_key=True), - Column("x", BigInteger, primary_key=True, autoincrement=False), - ) - - ops = self._fixture(m1, m2, return_ops=True) - is_(ops.ops[0].ops[0].kw["autoincrement"], False) - - def test_alter_column_autoincrement_compositepk_implicit_false(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", - m1, - Column("id", Integer, primary_key=True), - Column("x", Integer, primary_key=True), - ) - Table( - "a", - m2, - Column("id", Integer, primary_key=True), - 
Column("x", BigInteger, primary_key=True), - ) - - ops = self._fixture(m1, m2, return_ops=True) - assert "autoincrement" not in ops.ops[0].ops[0].kw - - @config.requirements.autoincrement_on_composite_pk - def test_alter_column_autoincrement_compositepk_explicit_true(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "a", - m1, - Column("id", Integer, primary_key=True, autoincrement=False), - Column("x", Integer, primary_key=True, autoincrement=True), - # on SQLA 1.0 and earlier, this being present - # trips the "add KEY for the primary key" so that the - # AUTO_INCREMENT keyword is accepted by MySQL. SQLA 1.1 and - # greater the columns are just reorganized. - mysql_engine="InnoDB", - ) - Table( - "a", - m2, - Column("id", Integer, primary_key=True, autoincrement=False), - Column("x", BigInteger, primary_key=True, autoincrement=True), - ) - - ops = self._fixture(m1, m2, return_ops=True) - is_(ops.ops[0].ops[0].kw["autoincrement"], True) diff --git a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_fks.py b/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_fks.py deleted file mode 100644 index 0240b98..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_fks.py +++ /dev/null @@ -1,1190 +0,0 @@ -from sqlalchemy import Column -from sqlalchemy import ForeignKeyConstraint -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy import String -from sqlalchemy import Table - -from ._autogen_fixtures import AutogenFixtureTest -from ...testing import combinations -from ...testing import config -from ...testing import eq_ -from ...testing import mock -from ...testing import TestBase - - -class AutogenerateForeignKeysTest(AutogenFixtureTest, TestBase): - __backend__ = True - __requires__ = ("foreign_key_constraint_reflection",) - - def test_remove_fk(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", String(10)), - ForeignKeyConstraint(["test2"], ["some_table.test"]), - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", String(10)), - ) - - diffs = self._fixture(m1, m2) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["test2"], - "some_table", - ["test"], - conditional_name="servergenerated", - ) - - def test_add_fk(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id", Integer, primary_key=True), - Column("test", String(10)), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", String(10)), - ) - - Table( - "some_table", - m2, - Column("id", Integer, primary_key=True), - Column("test", String(10)), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", String(10)), - ForeignKeyConstraint(["test2"], ["some_table.test"]), - ) - - diffs = self._fixture(m1, m2) - - self._assert_fk_diff( - diffs[0], "add_fk", "user", ["test2"], "some_table", ["test"] - ) - 
- def test_no_change(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id", Integer, primary_key=True), - Column("test", String(10)), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", Integer), - ForeignKeyConstraint(["test2"], ["some_table.id"]), - ) - - Table( - "some_table", - m2, - Column("id", Integer, primary_key=True), - Column("test", String(10)), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", Integer), - ForeignKeyConstraint(["test2"], ["some_table.id"]), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs, []) - - def test_no_change_composite_fk(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ForeignKeyConstraint( - ["other_id_1", "other_id_2"], - ["some_table.id_1", "some_table.id_2"], - ), - ) - - Table( - "some_table", - m2, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ForeignKeyConstraint( - ["other_id_1", "other_id_2"], - ["some_table.id_1", "some_table.id_2"], - ), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs, []) - - def test_casing_convention_changed_so_put_drops_first(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("test", String(10), primary_key=True), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", String(10)), - ForeignKeyConstraint(["test2"], ["some_table.test"], name="MyFK"), - ) - - Table( - "some_table", - m2, - Column("test", String(10), primary_key=True), - ) - - # foreign key autogen currently does not take "name" into account, - # so change the def just for the purposes of testing the - # add/drop order for now. 
- Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("test2", String(10)), - ForeignKeyConstraint(["a1"], ["some_table.test"], name="myfk"), - ) - - diffs = self._fixture(m1, m2) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["test2"], - "some_table", - ["test"], - name="MyFK" if config.requirements.fk_names.enabled else None, - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["a1"], - "some_table", - ["test"], - name="myfk", - ) - - def test_add_composite_fk_with_name(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ) - - Table( - "some_table", - m2, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ForeignKeyConstraint( - ["other_id_1", "other_id_2"], - ["some_table.id_1", "some_table.id_2"], - name="fk_test_name", - ), - ) - - diffs = self._fixture(m1, m2) - self._assert_fk_diff( - diffs[0], - "add_fk", - "user", - ["other_id_1", "other_id_2"], - "some_table", - ["id_1", "id_2"], - name="fk_test_name", - ) - - @config.requirements.no_name_normalize - def test_remove_composite_fk(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ForeignKeyConstraint( - ["other_id_1", "other_id_2"], - ["some_table.id_1", "some_table.id_2"], - name="fk_test_name", - ), - ) - - Table( - "some_table", - m2, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("a1", String(10), server_default="x"), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ) - - diffs = self._fixture(m1, m2) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["other_id_1", "other_id_2"], - "some_table", - ["id_1", "id_2"], - conditional_name="fk_test_name", - ) - - def test_add_fk_colkeys(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ) - - Table( - "some_table", - m2, - Column("id_1", String(10), key="tid1", primary_key=True), - Column("id_2", String(10), key="tid2", primary_key=True), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("other_id_1", String(10), key="oid1"), - 
Column("other_id_2", String(10), key="oid2"), - ForeignKeyConstraint( - ["oid1", "oid2"], - ["some_table.tid1", "some_table.tid2"], - name="fk_test_name", - ), - ) - - diffs = self._fixture(m1, m2) - - self._assert_fk_diff( - diffs[0], - "add_fk", - "user", - ["other_id_1", "other_id_2"], - "some_table", - ["id_1", "id_2"], - name="fk_test_name", - ) - - def test_no_change_colkeys(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id_1", String(10), primary_key=True), - Column("id_2", String(10), primary_key=True), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("other_id_1", String(10)), - Column("other_id_2", String(10)), - ForeignKeyConstraint( - ["other_id_1", "other_id_2"], - ["some_table.id_1", "some_table.id_2"], - ), - ) - - Table( - "some_table", - m2, - Column("id_1", String(10), key="tid1", primary_key=True), - Column("id_2", String(10), key="tid2", primary_key=True), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("other_id_1", String(10), key="oid1"), - Column("other_id_2", String(10), key="oid2"), - ForeignKeyConstraint( - ["oid1", "oid2"], ["some_table.tid1", "some_table.tid2"] - ), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs, []) - - -class IncludeHooksTest(AutogenFixtureTest, TestBase): - __backend__ = True - __requires__ = ("fk_names",) - - @combinations(("object",), ("name",)) - @config.requirements.no_name_normalize - def test_remove_connection_fk(self, hook_type): - m1 = MetaData() - m2 = MetaData() - - ref = Table( - "ref", - m1, - Column("id", Integer, primary_key=True), - ) - t1 = Table( - "t", - m1, - Column("x", Integer), - Column("y", Integer), - ) - t1.append_constraint( - ForeignKeyConstraint([t1.c.x], [ref.c.id], name="fk1") - ) - t1.append_constraint( - ForeignKeyConstraint([t1.c.y], [ref.c.id], name="fk2") - ) - - ref = Table( - "ref", - m2, - Column("id", Integer, primary_key=True), - ) - Table( - "t", - m2, - Column("x", Integer), - Column("y", Integer), - ) - - if hook_type == "object": - - def include_object(object_, name, type_, reflected, compare_to): - return not ( - isinstance(object_, ForeignKeyConstraint) - and type_ == "foreign_key_constraint" - and reflected - and name == "fk1" - ) - - diffs = self._fixture(m1, m2, object_filters=include_object) - elif hook_type == "name": - - def include_name(name, type_, parent_names): - if name == "fk1": - if type_ == "index": # MariaDB thing - return True - eq_(type_, "foreign_key_constraint") - eq_( - parent_names, - { - "schema_name": None, - "table_name": "t", - "schema_qualified_table_name": "t", - }, - ) - return False - else: - return True - - diffs = self._fixture(m1, m2, name_filters=include_name) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "t", - ["y"], - "ref", - ["id"], - conditional_name="fk2", - ) - eq_(len(diffs), 1) - - def test_add_metadata_fk(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "ref", - m1, - Column("id", Integer, primary_key=True), - ) - Table( - "t", - m1, - Column("x", Integer), - Column("y", Integer), - ) - - ref = Table( - "ref", - m2, - Column("id", Integer, primary_key=True), - ) - t2 = Table( - "t", - m2, - Column("x", Integer), - Column("y", Integer), - ) - t2.append_constraint( - ForeignKeyConstraint([t2.c.x], [ref.c.id], name="fk1") - ) - t2.append_constraint( - ForeignKeyConstraint([t2.c.y], [ref.c.id], name="fk2") - ) - - def include_object(object_, name, type_, reflected, compare_to): - return not ( - isinstance(object_, 
ForeignKeyConstraint) - and type_ == "foreign_key_constraint" - and not reflected - and name == "fk1" - ) - - diffs = self._fixture(m1, m2, object_filters=include_object) - - self._assert_fk_diff( - diffs[0], "add_fk", "t", ["y"], "ref", ["id"], name="fk2" - ) - eq_(len(diffs), 1) - - @combinations(("object",), ("name",)) - @config.requirements.no_name_normalize - def test_change_fk(self, hook_type): - m1 = MetaData() - m2 = MetaData() - - r1a = Table( - "ref_a", - m1, - Column("a", Integer, primary_key=True), - ) - Table( - "ref_b", - m1, - Column("a", Integer, primary_key=True), - Column("b", Integer, primary_key=True), - ) - t1 = Table( - "t", - m1, - Column("x", Integer), - Column("y", Integer), - Column("z", Integer), - ) - t1.append_constraint( - ForeignKeyConstraint([t1.c.x], [r1a.c.a], name="fk1") - ) - t1.append_constraint( - ForeignKeyConstraint([t1.c.y], [r1a.c.a], name="fk2") - ) - - Table( - "ref_a", - m2, - Column("a", Integer, primary_key=True), - ) - r2b = Table( - "ref_b", - m2, - Column("a", Integer, primary_key=True), - Column("b", Integer, primary_key=True), - ) - t2 = Table( - "t", - m2, - Column("x", Integer), - Column("y", Integer), - Column("z", Integer), - ) - t2.append_constraint( - ForeignKeyConstraint( - [t2.c.x, t2.c.z], [r2b.c.a, r2b.c.b], name="fk1" - ) - ) - t2.append_constraint( - ForeignKeyConstraint( - [t2.c.y, t2.c.z], [r2b.c.a, r2b.c.b], name="fk2" - ) - ) - - if hook_type == "object": - - def include_object(object_, name, type_, reflected, compare_to): - return not ( - isinstance(object_, ForeignKeyConstraint) - and type_ == "foreign_key_constraint" - and name == "fk1" - ) - - diffs = self._fixture(m1, m2, object_filters=include_object) - elif hook_type == "name": - - def include_name(name, type_, parent_names): - if type_ == "index": - return True # MariaDB thing - - if name == "fk1": - eq_(type_, "foreign_key_constraint") - eq_( - parent_names, - { - "schema_name": None, - "table_name": "t", - "schema_qualified_table_name": "t", - }, - ) - return False - else: - return True - - diffs = self._fixture(m1, m2, name_filters=include_name) - - if hook_type == "object": - self._assert_fk_diff( - diffs[0], "remove_fk", "t", ["y"], "ref_a", ["a"], name="fk2" - ) - self._assert_fk_diff( - diffs[1], - "add_fk", - "t", - ["y", "z"], - "ref_b", - ["a", "b"], - name="fk2", - ) - eq_(len(diffs), 2) - elif hook_type == "name": - eq_( - {(d[0], d[1].name) for d in diffs}, - {("add_fk", "fk2"), ("add_fk", "fk1"), ("remove_fk", "fk2")}, - ) - - -class AutogenerateFKOptionsTest(AutogenFixtureTest, TestBase): - __backend__ = True - - def _fk_opts_fixture(self, old_opts, new_opts): - m1 = MetaData() - m2 = MetaData() - - Table( - "some_table", - m1, - Column("id", Integer, primary_key=True), - Column("test", String(10)), - ) - - Table( - "user", - m1, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("tid", Integer), - ForeignKeyConstraint(["tid"], ["some_table.id"], **old_opts), - ) - - Table( - "some_table", - m2, - Column("id", Integer, primary_key=True), - Column("test", String(10)), - ) - - Table( - "user", - m2, - Column("id", Integer, primary_key=True), - Column("name", String(50), nullable=False), - Column("tid", Integer), - ForeignKeyConstraint(["tid"], ["some_table.id"], **new_opts), - ) - - return self._fixture(m1, m2) - - @config.requirements.fk_ondelete_is_reflected - def test_add_ondelete(self): - diffs = self._fk_opts_fixture({}, {"ondelete": "cascade"}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - 
"user", - ["tid"], - "some_table", - ["id"], - ondelete=None, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - ondelete="cascade", - ) - - @config.requirements.fk_ondelete_is_reflected - def test_remove_ondelete(self): - diffs = self._fk_opts_fixture({"ondelete": "CASCADE"}, {}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - ondelete="CASCADE", - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - ondelete=None, - ) - - def test_nochange_ondelete(self): - """test case sensitivity""" - diffs = self._fk_opts_fixture( - {"ondelete": "caSCAde"}, {"ondelete": "CasCade"} - ) - eq_(diffs, []) - - @config.requirements.fk_onupdate_is_reflected - def test_add_onupdate(self): - diffs = self._fk_opts_fixture({}, {"onupdate": "cascade"}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate=None, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate="cascade", - ) - - @config.requirements.fk_onupdate_is_reflected - def test_remove_onupdate(self): - diffs = self._fk_opts_fixture({"onupdate": "CASCADE"}, {}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate="CASCADE", - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate=None, - ) - - @config.requirements.fk_onupdate - def test_nochange_onupdate(self): - """test case sensitivity""" - diffs = self._fk_opts_fixture( - {"onupdate": "caSCAde"}, {"onupdate": "CasCade"} - ) - eq_(diffs, []) - - @config.requirements.fk_ondelete_restrict - def test_nochange_ondelete_restrict(self): - """test the RESTRICT option which MySQL doesn't report on""" - - diffs = self._fk_opts_fixture( - {"ondelete": "restrict"}, {"ondelete": "restrict"} - ) - eq_(diffs, []) - - @config.requirements.fk_onupdate_restrict - def test_nochange_onupdate_restrict(self): - """test the RESTRICT option which MySQL doesn't report on""" - - diffs = self._fk_opts_fixture( - {"onupdate": "restrict"}, {"onupdate": "restrict"} - ) - eq_(diffs, []) - - @config.requirements.fk_ondelete_noaction - def test_nochange_ondelete_noaction(self): - """test the NO ACTION option which generally comes back as None""" - - diffs = self._fk_opts_fixture( - {"ondelete": "no action"}, {"ondelete": "no action"} - ) - eq_(diffs, []) - - @config.requirements.fk_onupdate - def test_nochange_onupdate_noaction(self): - """test the NO ACTION option which generally comes back as None""" - - diffs = self._fk_opts_fixture( - {"onupdate": "no action"}, {"onupdate": "no action"} - ) - eq_(diffs, []) - - @config.requirements.fk_ondelete_restrict - def test_change_ondelete_from_restrict(self): - """test the RESTRICT option which MySQL doesn't report on""" - - # note that this is impossible to detect if we change - # from RESTRICT to NO ACTION on MySQL. 
- diffs = self._fk_opts_fixture( - {"ondelete": "restrict"}, {"ondelete": "cascade"} - ) - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate=None, - ondelete=mock.ANY, # MySQL reports None, PG reports RESTRICT - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate=None, - ondelete="cascade", - ) - - @config.requirements.fk_ondelete_restrict - def test_change_onupdate_from_restrict(self): - """test the RESTRICT option which MySQL doesn't report on""" - - # note that this is impossible to detect if we change - # from RESTRICT to NO ACTION on MySQL. - diffs = self._fk_opts_fixture( - {"onupdate": "restrict"}, {"onupdate": "cascade"} - ) - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate=mock.ANY, # MySQL reports None, PG reports RESTRICT - ondelete=None, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate="cascade", - ondelete=None, - ) - - @config.requirements.fk_ondelete_is_reflected - @config.requirements.fk_onupdate_is_reflected - def test_ondelete_onupdate_combo(self): - diffs = self._fk_opts_fixture( - {"onupdate": "CASCADE", "ondelete": "SET NULL"}, - {"onupdate": "RESTRICT", "ondelete": "RESTRICT"}, - ) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate="CASCADE", - ondelete="SET NULL", - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - onupdate="RESTRICT", - ondelete="RESTRICT", - ) - - @config.requirements.fk_initially - def test_add_initially_deferred(self): - diffs = self._fk_opts_fixture({}, {"initially": "deferred"}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially=None, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially="deferred", - ) - - @config.requirements.fk_initially - def test_remove_initially_deferred(self): - diffs = self._fk_opts_fixture({"initially": "deferred"}, {}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially="DEFERRED", - deferrable=True, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially=None, - ) - - @config.requirements.fk_deferrable - @config.requirements.fk_initially - def test_add_initially_immediate_plus_deferrable(self): - diffs = self._fk_opts_fixture( - {}, {"initially": "immediate", "deferrable": True} - ) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially=None, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially="immediate", - deferrable=True, - ) - - @config.requirements.fk_deferrable - @config.requirements.fk_initially - def test_remove_initially_immediate_plus_deferrable(self): - diffs = self._fk_opts_fixture( - {"initially": "immediate", "deferrable": True}, {} - ) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially=None, # immediate is the default - 
deferrable=True, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - initially=None, - deferrable=None, - ) - - @config.requirements.fk_initially - @config.requirements.fk_deferrable - def test_add_initially_deferrable_nochange_one(self): - diffs = self._fk_opts_fixture( - {"deferrable": True, "initially": "immediate"}, - {"deferrable": True, "initially": "immediate"}, - ) - - eq_(diffs, []) - - @config.requirements.fk_initially - @config.requirements.fk_deferrable - def test_add_initially_deferrable_nochange_two(self): - diffs = self._fk_opts_fixture( - {"deferrable": True, "initially": "deferred"}, - {"deferrable": True, "initially": "deferred"}, - ) - - eq_(diffs, []) - - @config.requirements.fk_initially - @config.requirements.fk_deferrable - def test_add_initially_deferrable_nochange_three(self): - diffs = self._fk_opts_fixture( - {"deferrable": None, "initially": "deferred"}, - {"deferrable": None, "initially": "deferred"}, - ) - - eq_(diffs, []) - - @config.requirements.fk_deferrable - def test_add_deferrable(self): - diffs = self._fk_opts_fixture({}, {"deferrable": True}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - deferrable=None, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - deferrable=True, - ) - - @config.requirements.fk_deferrable_is_reflected - def test_remove_deferrable(self): - diffs = self._fk_opts_fixture({"deferrable": True}, {}) - - self._assert_fk_diff( - diffs[0], - "remove_fk", - "user", - ["tid"], - "some_table", - ["id"], - deferrable=True, - conditional_name="servergenerated", - ) - - self._assert_fk_diff( - diffs[1], - "add_fk", - "user", - ["tid"], - "some_table", - ["id"], - deferrable=None, - ) diff --git a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_identity.py b/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_identity.py deleted file mode 100644 index 3dee9fc..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/test_autogen_identity.py +++ /dev/null @@ -1,226 +0,0 @@ -import sqlalchemy as sa -from sqlalchemy import Column -from sqlalchemy import Integer -from sqlalchemy import MetaData -from sqlalchemy import Table - -from alembic.util import sqla_compat -from ._autogen_fixtures import AutogenFixtureTest -from ... 
import testing -from ...testing import config -from ...testing import eq_ -from ...testing import is_true -from ...testing import TestBase - - -class AutogenerateIdentityTest(AutogenFixtureTest, TestBase): - __requires__ = ("identity_columns",) - __backend__ = True - - def test_add_identity_column(self): - m1 = MetaData() - m2 = MetaData() - - Table("user", m1, Column("other", sa.Text)) - - Table( - "user", - m2, - Column("other", sa.Text), - Column( - "id", - Integer, - sa.Identity(start=5, increment=7), - primary_key=True, - ), - ) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0], "add_column") - eq_(diffs[0][2], "user") - eq_(diffs[0][3].name, "id") - i = diffs[0][3].identity - - is_true(isinstance(i, sa.Identity)) - eq_(i.start, 5) - eq_(i.increment, 7) - - def test_remove_identity_column(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "user", - m1, - Column( - "id", - Integer, - sa.Identity(start=2, increment=3), - primary_key=True, - ), - ) - - Table("user", m2) - - diffs = self._fixture(m1, m2) - - eq_(diffs[0][0], "remove_column") - eq_(diffs[0][2], "user") - c = diffs[0][3] - eq_(c.name, "id") - - is_true(isinstance(c.identity, sa.Identity)) - eq_(c.identity.start, 2) - eq_(c.identity.increment, 3) - - def test_no_change_identity_column(self): - m1 = MetaData() - m2 = MetaData() - - for m in (m1, m2): - id_ = sa.Identity(start=2) - Table("user", m, Column("id", Integer, id_)) - - diffs = self._fixture(m1, m2) - - eq_(diffs, []) - - def test_dialect_kwargs_changes(self): - m1 = MetaData() - m2 = MetaData() - - if sqla_compat.identity_has_dialect_kwargs: - args = {"oracle_on_null": True, "oracle_order": True} - else: - args = {"on_null": True, "order": True} - - Table("user", m1, Column("id", Integer, sa.Identity(start=2))) - id_ = sa.Identity(start=2, **args) - Table("user", m2, Column("id", Integer, id_)) - - diffs = self._fixture(m1, m2) - if config.db.name == "oracle": - is_true(len(diffs), 1) - eq_(diffs[0][0][0], "modify_default") - else: - eq_(diffs, []) - - @testing.combinations( - (None, dict(start=2)), - (dict(start=2), None), - (dict(start=2), dict(start=2, increment=7)), - (dict(always=False), dict(always=True)), - ( - dict(start=1, minvalue=0, maxvalue=100, cycle=True), - dict(start=1, minvalue=0, maxvalue=100, cycle=False), - ), - ( - dict(start=10, increment=3, maxvalue=9999), - dict(start=10, increment=1, maxvalue=3333), - ), - ) - @config.requirements.identity_columns_alter - def test_change_identity(self, before, after): - arg_before = (sa.Identity(**before),) if before else () - arg_after = (sa.Identity(**after),) if after else () - - m1 = MetaData() - m2 = MetaData() - - Table( - "user", - m1, - Column("id", Integer, *arg_before), - Column("other", sa.Text), - ) - - Table( - "user", - m2, - Column("id", Integer, *arg_after), - Column("other", sa.Text), - ) - - diffs = self._fixture(m1, m2) - - eq_(len(diffs[0]), 1) - diffs = diffs[0][0] - eq_(diffs[0], "modify_default") - eq_(diffs[2], "user") - eq_(diffs[3], "id") - old = diffs[5] - new = diffs[6] - - def check(kw, idt): - if kw: - is_true(isinstance(idt, sa.Identity)) - for k, v in kw.items(): - eq_(getattr(idt, k), v) - else: - is_true(idt in (None, False)) - - check(before, old) - check(after, new) - - def test_add_identity_to_column(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "user", - m1, - Column("id", Integer), - Column("other", sa.Text), - ) - - Table( - "user", - m2, - Column("id", Integer, sa.Identity(start=2, maxvalue=1000)), - Column("other", sa.Text), - ) - - diffs = 
self._fixture(m1, m2) - - eq_(len(diffs[0]), 1) - diffs = diffs[0][0] - eq_(diffs[0], "modify_default") - eq_(diffs[2], "user") - eq_(diffs[3], "id") - eq_(diffs[5], None) - added = diffs[6] - - is_true(isinstance(added, sa.Identity)) - eq_(added.start, 2) - eq_(added.maxvalue, 1000) - - def test_remove_identity_from_column(self): - m1 = MetaData() - m2 = MetaData() - - Table( - "user", - m1, - Column("id", Integer, sa.Identity(start=2, maxvalue=1000)), - Column("other", sa.Text), - ) - - Table( - "user", - m2, - Column("id", Integer), - Column("other", sa.Text), - ) - - diffs = self._fixture(m1, m2) - - eq_(len(diffs[0]), 1) - diffs = diffs[0][0] - eq_(diffs[0], "modify_default") - eq_(diffs[2], "user") - eq_(diffs[3], "id") - eq_(diffs[6], None) - removed = diffs[5] - - is_true(isinstance(removed, sa.Identity)) diff --git a/myenv/Lib/site-packages/alembic/testing/suite/test_environment.py b/myenv/Lib/site-packages/alembic/testing/suite/test_environment.py deleted file mode 100644 index df2d9af..0000000 --- a/myenv/Lib/site-packages/alembic/testing/suite/test_environment.py +++ /dev/null @@ -1,364 +0,0 @@ -import io - -from ...migration import MigrationContext -from ...testing import assert_raises -from ...testing import config -from ...testing import eq_ -from ...testing import is_ -from ...testing import is_false -from ...testing import is_not_ -from ...testing import is_true -from ...testing import ne_ -from ...testing.fixtures import TestBase - - -class MigrationTransactionTest(TestBase): - __backend__ = True - - conn = None - - def _fixture(self, opts): - self.conn = conn = config.db.connect() - - if opts.get("as_sql", False): - self.context = MigrationContext.configure( - dialect=conn.dialect, opts=opts - ) - self.context.output_buffer = self.context.impl.output_buffer = ( - io.StringIO() - ) - else: - self.context = MigrationContext.configure( - connection=conn, opts=opts - ) - return self.context - - def teardown_method(self): - if self.conn: - self.conn.close() - - def test_proxy_transaction_rollback(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": True} - ) - - is_false(self.conn.in_transaction()) - proxy = context.begin_transaction(_per_migration=True) - is_true(self.conn.in_transaction()) - proxy.rollback() - is_false(self.conn.in_transaction()) - - def test_proxy_transaction_commit(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": True} - ) - proxy = context.begin_transaction(_per_migration=True) - is_true(self.conn.in_transaction()) - proxy.commit() - is_false(self.conn.in_transaction()) - - def test_proxy_transaction_contextmanager_commit(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": True} - ) - proxy = context.begin_transaction(_per_migration=True) - is_true(self.conn.in_transaction()) - with proxy: - pass - is_false(self.conn.in_transaction()) - - def test_proxy_transaction_contextmanager_rollback(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": True} - ) - proxy = context.begin_transaction(_per_migration=True) - is_true(self.conn.in_transaction()) - - def go(): - with proxy: - raise Exception("hi") - - assert_raises(Exception, go) - is_false(self.conn.in_transaction()) - - def test_proxy_transaction_contextmanager_explicit_rollback(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": True} - ) - proxy = context.begin_transaction(_per_migration=True) - 
is_true(self.conn.in_transaction()) - - with proxy: - is_true(self.conn.in_transaction()) - proxy.rollback() - is_false(self.conn.in_transaction()) - - is_false(self.conn.in_transaction()) - - def test_proxy_transaction_contextmanager_explicit_commit(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": True} - ) - proxy = context.begin_transaction(_per_migration=True) - is_true(self.conn.in_transaction()) - - with proxy: - is_true(self.conn.in_transaction()) - proxy.commit() - is_false(self.conn.in_transaction()) - - is_false(self.conn.in_transaction()) - - def test_transaction_per_migration_transactional_ddl(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": True} - ) - - is_false(self.conn.in_transaction()) - - with context.begin_transaction(): - is_false(self.conn.in_transaction()) - with context.begin_transaction(_per_migration=True): - is_true(self.conn.in_transaction()) - - is_false(self.conn.in_transaction()) - is_false(self.conn.in_transaction()) - - def test_transaction_per_migration_non_transactional_ddl(self): - context = self._fixture( - {"transaction_per_migration": True, "transactional_ddl": False} - ) - - is_false(self.conn.in_transaction()) - - with context.begin_transaction(): - is_false(self.conn.in_transaction()) - with context.begin_transaction(_per_migration=True): - is_true(self.conn.in_transaction()) - - is_false(self.conn.in_transaction()) - is_false(self.conn.in_transaction()) - - def test_transaction_per_all_transactional_ddl(self): - context = self._fixture({"transactional_ddl": True}) - - is_false(self.conn.in_transaction()) - - with context.begin_transaction(): - is_true(self.conn.in_transaction()) - with context.begin_transaction(_per_migration=True): - is_true(self.conn.in_transaction()) - - is_true(self.conn.in_transaction()) - is_false(self.conn.in_transaction()) - - def test_transaction_per_all_non_transactional_ddl(self): - context = self._fixture({"transactional_ddl": False}) - - is_false(self.conn.in_transaction()) - - with context.begin_transaction(): - is_false(self.conn.in_transaction()) - with context.begin_transaction(_per_migration=True): - is_true(self.conn.in_transaction()) - - is_false(self.conn.in_transaction()) - is_false(self.conn.in_transaction()) - - def test_transaction_per_all_sqlmode(self): - context = self._fixture({"as_sql": True}) - - context.execute("step 1") - with context.begin_transaction(): - context.execute("step 2") - with context.begin_transaction(_per_migration=True): - context.execute("step 3") - - context.execute("step 4") - context.execute("step 5") - - if context.impl.transactional_ddl: - self._assert_impl_steps( - "step 1", - "BEGIN", - "step 2", - "step 3", - "step 4", - "COMMIT", - "step 5", - ) - else: - self._assert_impl_steps( - "step 1", "step 2", "step 3", "step 4", "step 5" - ) - - def test_transaction_per_migration_sqlmode(self): - context = self._fixture( - {"as_sql": True, "transaction_per_migration": True} - ) - - context.execute("step 1") - with context.begin_transaction(): - context.execute("step 2") - with context.begin_transaction(_per_migration=True): - context.execute("step 3") - - context.execute("step 4") - context.execute("step 5") - - if context.impl.transactional_ddl: - self._assert_impl_steps( - "step 1", - "step 2", - "BEGIN", - "step 3", - "COMMIT", - "step 4", - "step 5", - ) - else: - self._assert_impl_steps( - "step 1", "step 2", "step 3", "step 4", "step 5" - ) - - @config.requirements.autocommit_isolation - 
def test_autocommit_block(self): - context = self._fixture({"transaction_per_migration": True}) - - is_false(self.conn.in_transaction()) - - with context.begin_transaction(): - is_false(self.conn.in_transaction()) - with context.begin_transaction(_per_migration=True): - is_true(self.conn.in_transaction()) - - with context.autocommit_block(): - # in 1.x, self.conn is separate due to the - # execution_options call. however for future they are the - # same connection and there is a "transaction" block - # despite autocommit - if self.is_sqlalchemy_future: - is_(context.connection, self.conn) - else: - is_not_(context.connection, self.conn) - is_false(self.conn.in_transaction()) - - eq_( - context.connection._execution_options[ - "isolation_level" - ], - "AUTOCOMMIT", - ) - - ne_( - context.connection._execution_options.get( - "isolation_level", None - ), - "AUTOCOMMIT", - ) - is_true(self.conn.in_transaction()) - - is_false(self.conn.in_transaction()) - is_false(self.conn.in_transaction()) - - @config.requirements.autocommit_isolation - def test_autocommit_block_no_transaction(self): - context = self._fixture({"transaction_per_migration": True}) - - is_false(self.conn.in_transaction()) - - with context.autocommit_block(): - is_true(context.connection.in_transaction()) - - # in 1.x, self.conn is separate due to the execution_options - # call. however for future they are the same connection and there - # is a "transaction" block despite autocommit - if self.is_sqlalchemy_future: - is_(context.connection, self.conn) - else: - is_not_(context.connection, self.conn) - is_false(self.conn.in_transaction()) - - eq_( - context.connection._execution_options["isolation_level"], - "AUTOCOMMIT", - ) - - ne_( - context.connection._execution_options.get("isolation_level", None), - "AUTOCOMMIT", - ) - - is_false(self.conn.in_transaction()) - - def test_autocommit_block_transactional_ddl_sqlmode(self): - context = self._fixture( - { - "transaction_per_migration": True, - "transactional_ddl": True, - "as_sql": True, - } - ) - - with context.begin_transaction(): - context.execute("step 1") - with context.begin_transaction(_per_migration=True): - context.execute("step 2") - - with context.autocommit_block(): - context.execute("step 3") - - context.execute("step 4") - - context.execute("step 5") - - self._assert_impl_steps( - "step 1", - "BEGIN", - "step 2", - "COMMIT", - "step 3", - "BEGIN", - "step 4", - "COMMIT", - "step 5", - ) - - def test_autocommit_block_nontransactional_ddl_sqlmode(self): - context = self._fixture( - { - "transaction_per_migration": True, - "transactional_ddl": False, - "as_sql": True, - } - ) - - with context.begin_transaction(): - context.execute("step 1") - with context.begin_transaction(_per_migration=True): - context.execute("step 2") - - with context.autocommit_block(): - context.execute("step 3") - - context.execute("step 4") - - context.execute("step 5") - - self._assert_impl_steps( - "step 1", "step 2", "step 3", "step 4", "step 5" - ) - - def _assert_impl_steps(self, *steps): - to_check = self.context.output_buffer.getvalue() - - self.context.impl.output_buffer = buf = io.StringIO() - for step in steps: - if step == "BEGIN": - self.context.impl.emit_begin() - elif step == "COMMIT": - self.context.impl.emit_commit() - else: - self.context.impl._exec(step) - - eq_(to_check, buf.getvalue()) diff --git a/myenv/Lib/site-packages/alembic/testing/suite/test_op.py b/myenv/Lib/site-packages/alembic/testing/suite/test_op.py deleted file mode 100644 index a63b3f2..0000000 --- 
a/myenv/Lib/site-packages/alembic/testing/suite/test_op.py +++ /dev/null @@ -1,42 +0,0 @@ -"""Test against the builders in the op.* module.""" - -from sqlalchemy import Column -from sqlalchemy import event -from sqlalchemy import Integer -from sqlalchemy import String -from sqlalchemy import Table -from sqlalchemy.sql import text - -from ...testing.fixtures import AlterColRoundTripFixture -from ...testing.fixtures import TestBase - - -@event.listens_for(Table, "after_parent_attach") -def _add_cols(table, metadata): - if table.name == "tbl_with_auto_appended_column": - table.append_column(Column("bat", Integer)) - - -class BackendAlterColumnTest(AlterColRoundTripFixture, TestBase): - __backend__ = True - - def test_rename_column(self): - self._run_alter_col({}, {"name": "newname"}) - - def test_modify_type_int_str(self): - self._run_alter_col({"type": Integer()}, {"type": String(50)}) - - def test_add_server_default_int(self): - self._run_alter_col({"type": Integer}, {"server_default": text("5")}) - - def test_modify_server_default_int(self): - self._run_alter_col( - {"type": Integer, "server_default": text("2")}, - {"server_default": text("5")}, - ) - - def test_modify_nullable_to_non(self): - self._run_alter_col({}, {"nullable": False}) - - def test_modify_non_nullable_to_nullable(self): - self._run_alter_col({"nullable": False}, {"nullable": True}) diff --git a/myenv/Lib/site-packages/alembic/testing/util.py b/myenv/Lib/site-packages/alembic/testing/util.py deleted file mode 100644 index 4517a69..0000000 --- a/myenv/Lib/site-packages/alembic/testing/util.py +++ /dev/null @@ -1,126 +0,0 @@ -# testing/util.py -# Copyright (C) 2005-2019 the SQLAlchemy authors and contributors -# -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php -from __future__ import annotations - -import types -from typing import Union - -from sqlalchemy.util import inspect_getfullargspec - -from ..util import sqla_2 - - -def flag_combinations(*combinations): - """A facade around @testing.combinations() oriented towards boolean - keyword-based arguments. - - Basically generates a nice looking identifier based on the keywords - and also sets up the argument names. - - E.g.:: - - @testing.flag_combinations( - dict(lazy=False, passive=False), - dict(lazy=True, passive=False), - dict(lazy=False, passive=True), - dict(lazy=False, passive=True, raiseload=True), - ) - - - would result in:: - - @testing.combinations( - ('', False, False, False), - ('lazy', True, False, False), - ('lazy_passive', True, True, False), - ('lazy_passive', True, True, True), - id_='iaaa', - argnames='lazy,passive,raiseload' - ) - - """ - from sqlalchemy.testing import config - - keys = set() - - for d in combinations: - keys.update(d) - - keys = sorted(keys) - - return config.combinations( - *[ - ("_".join(k for k in keys if d.get(k, False)),) - + tuple(d.get(k, False) for k in keys) - for d in combinations - ], - id_="i" + ("a" * len(keys)), - argnames=",".join(keys), - ) - - -def resolve_lambda(__fn, **kw): - """Given a no-arg lambda and a namespace, return a new lambda that - has all the values filled in. - - This is used so that we can have module-level fixtures that - refer to instance-level variables using lambdas. 
- - """ - - pos_args = inspect_getfullargspec(__fn)[0] - pass_pos_args = {arg: kw.pop(arg) for arg in pos_args} - glb = dict(__fn.__globals__) - glb.update(kw) - new_fn = types.FunctionType(__fn.__code__, glb) - return new_fn(**pass_pos_args) - - -def metadata_fixture(ddl="function"): - """Provide MetaData for a pytest fixture.""" - - from sqlalchemy.testing import config - from . import fixture_functions - - def decorate(fn): - def run_ddl(self): - from sqlalchemy import schema - - metadata = self.metadata = schema.MetaData() - try: - result = fn(self, metadata) - metadata.create_all(config.db) - # TODO: - # somehow get a per-function dml erase fixture here - yield result - finally: - metadata.drop_all(config.db) - - return fixture_functions.fixture(scope=ddl)(run_ddl) - - return decorate - - -def _safe_int(value: str) -> Union[int, str]: - try: - return int(value) - except: - return value - - -def testing_engine(url=None, options=None, future=False): - from sqlalchemy.testing import config - from sqlalchemy.testing.engines import testing_engine - - if not future: - future = getattr(config._current.options, "future_engine", False) - - if not sqla_2: - kw = {"future": future} if future else {} - else: - kw = {} - return testing_engine(url, options, **kw) diff --git a/myenv/Lib/site-packages/alembic/testing/warnings.py b/myenv/Lib/site-packages/alembic/testing/warnings.py deleted file mode 100644 index e87136b..0000000 --- a/myenv/Lib/site-packages/alembic/testing/warnings.py +++ /dev/null @@ -1,40 +0,0 @@ -# testing/warnings.py -# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors -# -# -# This module is part of SQLAlchemy and is released under -# the MIT License: http://www.opensource.org/licenses/mit-license.php - - -import warnings - -from sqlalchemy import exc as sa_exc - -from ..util import sqla_14 - - -def setup_filters(): - """Set global warning behavior for the test suite.""" - - warnings.resetwarnings() - - warnings.filterwarnings("error", category=sa_exc.SADeprecationWarning) - warnings.filterwarnings("error", category=sa_exc.SAWarning) - - # some selected deprecations... 
- warnings.filterwarnings("error", category=DeprecationWarning) - if not sqla_14: - # 1.3 uses pkg_resources in PluginLoader - warnings.filterwarnings( - "ignore", - "pkg_resources is deprecated as an API", - DeprecationWarning, - ) - try: - import pytest - except ImportError: - pass - else: - warnings.filterwarnings( - "once", category=pytest.PytestDeprecationWarning - ) diff --git a/myenv/Lib/site-packages/alembic/util/__init__.py b/myenv/Lib/site-packages/alembic/util/__init__.py deleted file mode 100644 index 4724e1f..0000000 --- a/myenv/Lib/site-packages/alembic/util/__init__.py +++ /dev/null @@ -1,35 +0,0 @@ -from .editor import open_in_editor as open_in_editor -from .exc import AutogenerateDiffsDetected as AutogenerateDiffsDetected -from .exc import CommandError as CommandError -from .langhelpers import _with_legacy_names as _with_legacy_names -from .langhelpers import asbool as asbool -from .langhelpers import dedupe_tuple as dedupe_tuple -from .langhelpers import Dispatcher as Dispatcher -from .langhelpers import EMPTY_DICT as EMPTY_DICT -from .langhelpers import immutabledict as immutabledict -from .langhelpers import memoized_property as memoized_property -from .langhelpers import ModuleClsProxy as ModuleClsProxy -from .langhelpers import not_none as not_none -from .langhelpers import rev_id as rev_id -from .langhelpers import to_list as to_list -from .langhelpers import to_tuple as to_tuple -from .langhelpers import unique_list as unique_list -from .messaging import err as err -from .messaging import format_as_comma as format_as_comma -from .messaging import msg as msg -from .messaging import obfuscate_url_pw as obfuscate_url_pw -from .messaging import status as status -from .messaging import warn as warn -from .messaging import write_outstream as write_outstream -from .pyfiles import coerce_resource_to_filename as coerce_resource_to_filename -from .pyfiles import load_python_file as load_python_file -from .pyfiles import pyc_file_from_path as pyc_file_from_path -from .pyfiles import template_to_file as template_to_file -from .sqla_compat import has_computed as has_computed -from .sqla_compat import sqla_13 as sqla_13 -from .sqla_compat import sqla_14 as sqla_14 -from .sqla_compat import sqla_2 as sqla_2 - - -if not sqla_13: - raise CommandError("SQLAlchemy 1.3.0 or greater is required.") diff --git a/myenv/Lib/site-packages/alembic/util/compat.py b/myenv/Lib/site-packages/alembic/util/compat.py deleted file mode 100644 index e185cc4..0000000 --- a/myenv/Lib/site-packages/alembic/util/compat.py +++ /dev/null @@ -1,89 +0,0 @@ -# mypy: no-warn-unused-ignores - -from __future__ import annotations - -from configparser import ConfigParser -import io -import os -import sys -import typing -from typing import Any -from typing import List -from typing import Optional -from typing import Sequence -from typing import Union - -if True: - # zimports hack for too-long names - from sqlalchemy.util import ( # noqa: F401 - inspect_getfullargspec as inspect_getfullargspec, - ) - from sqlalchemy.util.compat import ( # noqa: F401 - inspect_formatargspec as inspect_formatargspec, - ) - -is_posix = os.name == "posix" - -py311 = sys.version_info >= (3, 11) -py310 = sys.version_info >= (3, 10) -py39 = sys.version_info >= (3, 9) - - -# produce a wrapper that allows encoded text to stream -# into a given buffer, but doesn't close it. -# not sure of a more idiomatic approach to this. 
-class EncodedIO(io.TextIOWrapper): - def close(self) -> None: - pass - - -if py39: - from importlib import resources as _resources - - importlib_resources = _resources - from importlib import metadata as _metadata - - importlib_metadata = _metadata - from importlib.metadata import EntryPoint as EntryPoint -else: - import importlib_resources # type:ignore # noqa - import importlib_metadata # type:ignore # noqa - from importlib_metadata import EntryPoint # type:ignore # noqa - - -def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: - ep = importlib_metadata.entry_points() - if hasattr(ep, "select"): - return ep.select(group=group) - else: - return ep.get(group, ()) # type: ignore - - -def formatannotation_fwdref( - annotation: Any, base_module: Optional[Any] = None -) -> str: - """vendored from python 3.7""" - # copied over _formatannotation from sqlalchemy 2.0 - - if isinstance(annotation, str): - return annotation - - if getattr(annotation, "__module__", None) == "typing": - return repr(annotation).replace("typing.", "").replace("~", "") - if isinstance(annotation, type): - if annotation.__module__ in ("builtins", base_module): - return repr(annotation.__qualname__) - return annotation.__module__ + "." + annotation.__qualname__ - elif isinstance(annotation, typing.TypeVar): - return repr(annotation).replace("~", "") - return repr(annotation).replace("~", "") - - -def read_config_parser( - file_config: ConfigParser, - file_argument: Sequence[Union[str, os.PathLike[str]]], -) -> List[str]: - if py310: - return file_config.read(file_argument, encoding="locale") - else: - return file_config.read(file_argument) diff --git a/myenv/Lib/site-packages/alembic/util/editor.py b/myenv/Lib/site-packages/alembic/util/editor.py deleted file mode 100644 index f1d1557..0000000 --- a/myenv/Lib/site-packages/alembic/util/editor.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -import os -from os.path import exists -from os.path import join -from os.path import splitext -from subprocess import check_call -from typing import Dict -from typing import List -from typing import Mapping -from typing import Optional - -from .compat import is_posix -from .exc import CommandError - - -def open_in_editor( - filename: str, environ: Optional[Dict[str, str]] = None -) -> None: - """ - Opens the given file in a text editor. If the environment variable - ``EDITOR`` is set, this is taken as preference. - - Otherwise, a list of commonly installed editors is tried. - - If no editor matches, an :py:exc:`OSError` is raised. - - :param filename: The filename to open. Will be passed verbatim to the - editor command. - :param environ: An optional drop-in replacement for ``os.environ``. Used - mainly for testing. - """ - env = os.environ if environ is None else environ - try: - editor = _find_editor(env) - check_call([editor, filename]) - except Exception as exc: - raise CommandError("Error executing editor (%s)" % (exc,)) from exc - - -def _find_editor(environ: Mapping[str, str]) -> str: - candidates = _default_editors() - for i, var in enumerate(("EDITOR", "VISUAL")): - if var in environ: - user_choice = environ[var] - if exists(user_choice): - return user_choice - if os.sep not in user_choice: - candidates.insert(i, user_choice) - - for candidate in candidates: - path = _find_executable(candidate, environ) - if path is not None: - return path - raise OSError( - "No suitable editor found. 
Please set the " - '"EDITOR" or "VISUAL" environment variables' - ) - - -def _find_executable( - candidate: str, environ: Mapping[str, str] -) -> Optional[str]: - # Assuming this is on the PATH, we need to determine it's absolute - # location. Otherwise, ``check_call`` will fail - if not is_posix and splitext(candidate)[1] != ".exe": - candidate += ".exe" - for path in environ.get("PATH", "").split(os.pathsep): - value = join(path, candidate) - if exists(value): - return value - return None - - -def _default_editors() -> List[str]: - # Look for an editor. Prefer the user's choice by env-var, fall back to - # most commonly installed editor (nano/vim) - if is_posix: - return ["sensible-editor", "editor", "nano", "vim", "code"] - else: - return ["code.exe", "notepad++.exe", "notepad.exe"] diff --git a/myenv/Lib/site-packages/alembic/util/exc.py b/myenv/Lib/site-packages/alembic/util/exc.py deleted file mode 100644 index 0d0496b..0000000 --- a/myenv/Lib/site-packages/alembic/util/exc.py +++ /dev/null @@ -1,6 +0,0 @@ -class CommandError(Exception): - pass - - -class AutogenerateDiffsDetected(CommandError): - pass diff --git a/myenv/Lib/site-packages/alembic/util/langhelpers.py b/myenv/Lib/site-packages/alembic/util/langhelpers.py deleted file mode 100644 index 80d88cb..0000000 --- a/myenv/Lib/site-packages/alembic/util/langhelpers.py +++ /dev/null @@ -1,332 +0,0 @@ -from __future__ import annotations - -import collections -from collections.abc import Iterable -import textwrap -from typing import Any -from typing import Callable -from typing import cast -from typing import Dict -from typing import List -from typing import Mapping -from typing import MutableMapping -from typing import NoReturn -from typing import Optional -from typing import overload -from typing import Sequence -from typing import Set -from typing import Tuple -from typing import Type -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union -import uuid -import warnings - -from sqlalchemy.util import asbool as asbool # noqa: F401 -from sqlalchemy.util import immutabledict as immutabledict # noqa: F401 -from sqlalchemy.util import to_list as to_list # noqa: F401 -from sqlalchemy.util import unique_list as unique_list - -from .compat import inspect_getfullargspec - -if True: - # zimports workaround :( - from sqlalchemy.util import ( # noqa: F401 - memoized_property as memoized_property, - ) - - -EMPTY_DICT: Mapping[Any, Any] = immutabledict() -_T = TypeVar("_T", bound=Any) - -_C = TypeVar("_C", bound=Callable[..., Any]) - - -class _ModuleClsMeta(type): - def __setattr__(cls, key: str, value: Callable[..., Any]) -> None: - super().__setattr__(key, value) - cls._update_module_proxies(key) # type: ignore - - -class ModuleClsProxy(metaclass=_ModuleClsMeta): - """Create module level proxy functions for the - methods on a given class. - - The functions will have a compatible signature - as the methods. 
- - """ - - _setups: Dict[ - Type[Any], - Tuple[ - Set[str], - List[Tuple[MutableMapping[str, Any], MutableMapping[str, Any]]], - ], - ] = collections.defaultdict(lambda: (set(), [])) - - @classmethod - def _update_module_proxies(cls, name: str) -> None: - attr_names, modules = cls._setups[cls] - for globals_, locals_ in modules: - cls._add_proxied_attribute(name, globals_, locals_, attr_names) - - def _install_proxy(self) -> None: - attr_names, modules = self._setups[self.__class__] - for globals_, locals_ in modules: - globals_["_proxy"] = self - for attr_name in attr_names: - globals_[attr_name] = getattr(self, attr_name) - - def _remove_proxy(self) -> None: - attr_names, modules = self._setups[self.__class__] - for globals_, locals_ in modules: - globals_["_proxy"] = None - for attr_name in attr_names: - del globals_[attr_name] - - @classmethod - def create_module_class_proxy( - cls, - globals_: MutableMapping[str, Any], - locals_: MutableMapping[str, Any], - ) -> None: - attr_names, modules = cls._setups[cls] - modules.append((globals_, locals_)) - cls._setup_proxy(globals_, locals_, attr_names) - - @classmethod - def _setup_proxy( - cls, - globals_: MutableMapping[str, Any], - locals_: MutableMapping[str, Any], - attr_names: Set[str], - ) -> None: - for methname in dir(cls): - cls._add_proxied_attribute(methname, globals_, locals_, attr_names) - - @classmethod - def _add_proxied_attribute( - cls, - methname: str, - globals_: MutableMapping[str, Any], - locals_: MutableMapping[str, Any], - attr_names: Set[str], - ) -> None: - if not methname.startswith("_"): - meth = getattr(cls, methname) - if callable(meth): - locals_[methname] = cls._create_method_proxy( - methname, globals_, locals_ - ) - else: - attr_names.add(methname) - - @classmethod - def _create_method_proxy( - cls, - name: str, - globals_: MutableMapping[str, Any], - locals_: MutableMapping[str, Any], - ) -> Callable[..., Any]: - fn = getattr(cls, name) - - def _name_error(name: str, from_: Exception) -> NoReturn: - raise NameError( - "Can't invoke function '%s', as the proxy object has " - "not yet been " - "established for the Alembic '%s' class. " - "Try placing this code inside a callable." - % (name, cls.__name__) - ) from from_ - - globals_["_name_error"] = _name_error - - translations = getattr(fn, "_legacy_translations", []) - if translations: - spec = inspect_getfullargspec(fn) - if spec[0] and spec[0][0] == "self": - spec[0].pop(0) - - outer_args = inner_args = "*args, **kw" - translate_str = "args, kw = _translate(%r, %r, %r, args, kw)" % ( - fn.__name__, - tuple(spec), - translations, - ) - - def translate( - fn_name: str, spec: Any, translations: Any, args: Any, kw: Any - ) -> Any: - return_kw = {} - return_args = [] - - for oldname, newname in translations: - if oldname in kw: - warnings.warn( - "Argument %r is now named %r " - "for method %s()." 
% (oldname, newname, fn_name) - ) - return_kw[newname] = kw.pop(oldname) - return_kw.update(kw) - - args = list(args) - if spec[3]: - pos_only = spec[0][: -len(spec[3])] - else: - pos_only = spec[0] - for arg in pos_only: - if arg not in return_kw: - try: - return_args.append(args.pop(0)) - except IndexError: - raise TypeError( - "missing required positional argument: %s" - % arg - ) - return_args.extend(args) - - return return_args, return_kw - - globals_["_translate"] = translate - else: - outer_args = "*args, **kw" - inner_args = "*args, **kw" - translate_str = "" - - func_text = textwrap.dedent( - """\ - def %(name)s(%(args)s): - %(doc)r - %(translate)s - try: - p = _proxy - except NameError as ne: - _name_error('%(name)s', ne) - return _proxy.%(name)s(%(apply_kw)s) - e - """ - % { - "name": name, - "translate": translate_str, - "args": outer_args, - "apply_kw": inner_args, - "doc": fn.__doc__, - } - ) - lcl: MutableMapping[str, Any] = {} - - exec(func_text, cast("Dict[str, Any]", globals_), lcl) - return cast("Callable[..., Any]", lcl[name]) - - -def _with_legacy_names(translations: Any) -> Any: - def decorate(fn: _C) -> _C: - fn._legacy_translations = translations # type: ignore[attr-defined] - return fn - - return decorate - - -def rev_id() -> str: - return uuid.uuid4().hex[-12:] - - -@overload -def to_tuple(x: Any, default: Tuple[Any, ...]) -> Tuple[Any, ...]: ... - - -@overload -def to_tuple(x: None, default: Optional[_T] = ...) -> _T: ... - - -@overload -def to_tuple( - x: Any, default: Optional[Tuple[Any, ...]] = None -) -> Tuple[Any, ...]: ... - - -def to_tuple( - x: Any, default: Optional[Tuple[Any, ...]] = None -) -> Optional[Tuple[Any, ...]]: - if x is None: - return default - elif isinstance(x, str): - return (x,) - elif isinstance(x, Iterable): - return tuple(x) - else: - return (x,) - - -def dedupe_tuple(tup: Tuple[str, ...]) -> Tuple[str, ...]: - return tuple(unique_list(tup)) - - -class Dispatcher: - def __init__(self, uselist: bool = False) -> None: - self._registry: Dict[Tuple[Any, ...], Any] = {} - self.uselist = uselist - - def dispatch_for( - self, target: Any, qualifier: str = "default" - ) -> Callable[[_C], _C]: - def decorate(fn: _C) -> _C: - if self.uselist: - self._registry.setdefault((target, qualifier), []).append(fn) - else: - assert (target, qualifier) not in self._registry - self._registry[(target, qualifier)] = fn - return fn - - return decorate - - def dispatch(self, obj: Any, qualifier: str = "default") -> Any: - if isinstance(obj, str): - targets: Sequence[Any] = [obj] - elif isinstance(obj, type): - targets = obj.__mro__ - else: - targets = type(obj).__mro__ - - for spcls in targets: - if qualifier != "default" and (spcls, qualifier) in self._registry: - return self._fn_or_list(self._registry[(spcls, qualifier)]) - elif (spcls, "default") in self._registry: - return self._fn_or_list(self._registry[(spcls, "default")]) - else: - raise ValueError("no dispatch function for object: %s" % obj) - - def _fn_or_list( - self, fn_or_list: Union[List[Callable[..., Any]], Callable[..., Any]] - ) -> Callable[..., Any]: - if self.uselist: - - def go(*arg: Any, **kw: Any) -> None: - if TYPE_CHECKING: - assert isinstance(fn_or_list, Sequence) - for fn in fn_or_list: - fn(*arg, **kw) - - return go - else: - return fn_or_list # type: ignore - - def branch(self) -> Dispatcher: - """Return a copy of this dispatcher that is independently - writable.""" - - d = Dispatcher() - if self.uselist: - d._registry.update( - (k, [fn for fn in self._registry[k]]) for k in 
self._registry - ) - else: - d._registry.update(self._registry) - return d - - -def not_none(value: Optional[_T]) -> _T: - assert value is not None - return value diff --git a/myenv/Lib/site-packages/alembic/util/messaging.py b/myenv/Lib/site-packages/alembic/util/messaging.py deleted file mode 100644 index 6618fa7..0000000 --- a/myenv/Lib/site-packages/alembic/util/messaging.py +++ /dev/null @@ -1,121 +0,0 @@ -from __future__ import annotations - -from collections.abc import Iterable -from contextlib import contextmanager -import logging -import sys -import textwrap -from typing import Iterator -from typing import Optional -from typing import TextIO -from typing import Union -import warnings - -from sqlalchemy.engine import url - -from . import sqla_compat - -log = logging.getLogger(__name__) - -# disable "no handler found" errors -logging.getLogger("alembic").addHandler(logging.NullHandler()) - - -try: - import fcntl - import termios - import struct - - ioctl = fcntl.ioctl(0, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)) - _h, TERMWIDTH, _hp, _wp = struct.unpack("HHHH", ioctl) - if TERMWIDTH <= 0: # can occur if running in emacs pseudo-tty - TERMWIDTH = None -except (ImportError, OSError): - TERMWIDTH = None - - -def write_outstream( - stream: TextIO, *text: Union[str, bytes], quiet: bool = False -) -> None: - if quiet: - return - encoding = getattr(stream, "encoding", "ascii") or "ascii" - for t in text: - if not isinstance(t, bytes): - t = t.encode(encoding, "replace") - t = t.decode(encoding) - try: - stream.write(t) - except OSError: - # suppress "broken pipe" errors. - # no known way to handle this on Python 3 however - # as the exception is "ignored" (noisily) in TextIOWrapper. - break - - -@contextmanager -def status( - status_msg: str, newline: bool = False, quiet: bool = False -) -> Iterator[None]: - msg(status_msg + " ...", newline, flush=True, quiet=quiet) - try: - yield - except: - if not quiet: - write_outstream(sys.stdout, " FAILED\n") - raise - else: - if not quiet: - write_outstream(sys.stdout, " done\n") - - -def err(message: str, quiet: bool = False) -> None: - log.error(message) - msg(f"FAILED: {message}", quiet=quiet) - sys.exit(-1) - - -def obfuscate_url_pw(input_url: str) -> str: - u = url.make_url(input_url) - return sqla_compat.url_render_as_string(u, hide_password=True) # type: ignore # noqa: E501 - - -def warn(msg: str, stacklevel: int = 2) -> None: - warnings.warn(msg, UserWarning, stacklevel=stacklevel) - - -def msg( - msg: str, newline: bool = True, flush: bool = False, quiet: bool = False -) -> None: - if quiet: - return - if TERMWIDTH is None: - write_outstream(sys.stdout, msg) - if newline: - write_outstream(sys.stdout, "\n") - else: - # left indent output lines - indent = " " - lines = textwrap.wrap( - msg, - TERMWIDTH, - initial_indent=indent, - subsequent_indent=indent, - ) - if len(lines) > 1: - for line in lines[0:-1]: - write_outstream(sys.stdout, line, "\n") - write_outstream(sys.stdout, lines[-1], ("\n" if newline else "")) - if flush: - sys.stdout.flush() - - -def format_as_comma(value: Optional[Union[str, Iterable[str]]]) -> str: - if value is None: - return "" - elif isinstance(value, str): - return value - elif isinstance(value, Iterable): - return ", ".join(value) - else: - raise ValueError("Don't know how to comma-format %r" % value) diff --git a/myenv/Lib/site-packages/alembic/util/pyfiles.py b/myenv/Lib/site-packages/alembic/util/pyfiles.py deleted file mode 100644 index 973bd45..0000000 --- 
a/myenv/Lib/site-packages/alembic/util/pyfiles.py +++ /dev/null @@ -1,114 +0,0 @@ -from __future__ import annotations - -import atexit -from contextlib import ExitStack -import importlib -import importlib.machinery -import importlib.util -import os -import re -import tempfile -from types import ModuleType -from typing import Any -from typing import Optional - -from mako import exceptions -from mako.template import Template - -from . import compat -from .exc import CommandError - - -def template_to_file( - template_file: str, dest: str, output_encoding: str, **kw: Any -) -> None: - template = Template(filename=template_file) - try: - output = template.render_unicode(**kw).encode(output_encoding) - except: - with tempfile.NamedTemporaryFile(suffix=".txt", delete=False) as ntf: - ntf.write( - exceptions.text_error_template() - .render_unicode() - .encode(output_encoding) - ) - fname = ntf.name - raise CommandError( - "Template rendering failed; see %s for a " - "template-oriented traceback." % fname - ) - else: - with open(dest, "wb") as f: - f.write(output) - - -def coerce_resource_to_filename(fname: str) -> str: - """Interpret a filename as either a filesystem location or as a package - resource. - - Names that are non absolute paths and contain a colon - are interpreted as resources and coerced to a file location. - - """ - if not os.path.isabs(fname) and ":" in fname: - tokens = fname.split(":") - - # from https://importlib-resources.readthedocs.io/en/latest/migration.html#pkg-resources-resource-filename # noqa E501 - - file_manager = ExitStack() - atexit.register(file_manager.close) - - ref = compat.importlib_resources.files(tokens[0]) - for tok in tokens[1:]: - ref = ref / tok - fname = file_manager.enter_context( # type: ignore[assignment] - compat.importlib_resources.as_file(ref) - ) - return fname - - -def pyc_file_from_path(path: str) -> Optional[str]: - """Given a python source path, locate the .pyc.""" - - candidate = importlib.util.cache_from_source(path) - if os.path.exists(candidate): - return candidate - - # even for pep3147, fall back to the old way of finding .pyc files, - # to support sourceless operation - filepath, ext = os.path.splitext(path) - for ext in importlib.machinery.BYTECODE_SUFFIXES: - if os.path.exists(filepath + ext): - return filepath + ext - else: - return None - - -def load_python_file(dir_: str, filename: str) -> ModuleType: - """Load a file from the given path as a Python module.""" - - module_id = re.sub(r"\W", "_", filename) - path = os.path.join(dir_, filename) - _, ext = os.path.splitext(filename) - if ext == ".py": - if os.path.exists(path): - module = load_module_py(module_id, path) - else: - pyc_path = pyc_file_from_path(path) - if pyc_path is None: - raise ImportError("Can't find Python file %s" % path) - else: - module = load_module_py(module_id, pyc_path) - elif ext in (".pyc", ".pyo"): - module = load_module_py(module_id, path) - else: - assert False - return module - - -def load_module_py(module_id: str, path: str) -> ModuleType: - spec = importlib.util.spec_from_file_location(module_id, path) - assert spec - module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) # type: ignore - return module diff --git a/myenv/Lib/site-packages/alembic/util/sqla_compat.py b/myenv/Lib/site-packages/alembic/util/sqla_compat.py deleted file mode 100644 index d4ed0fd..0000000 --- a/myenv/Lib/site-packages/alembic/util/sqla_compat.py +++ /dev/null @@ -1,663 +0,0 @@ -# mypy: allow-untyped-defs, allow-incomplete-defs, allow-untyped-calls 
-# mypy: no-warn-return-any, allow-any-generics - -from __future__ import annotations - -import contextlib -import re -from typing import Any -from typing import Callable -from typing import Dict -from typing import Iterable -from typing import Iterator -from typing import Mapping -from typing import Optional -from typing import Protocol -from typing import Set -from typing import Type -from typing import TYPE_CHECKING -from typing import TypeVar -from typing import Union - -from sqlalchemy import __version__ -from sqlalchemy import inspect -from sqlalchemy import schema -from sqlalchemy import sql -from sqlalchemy import types as sqltypes -from sqlalchemy.engine import url -from sqlalchemy.schema import CheckConstraint -from sqlalchemy.schema import Column -from sqlalchemy.schema import ForeignKeyConstraint -from sqlalchemy.sql import visitors -from sqlalchemy.sql.base import DialectKWArgs -from sqlalchemy.sql.elements import BindParameter -from sqlalchemy.sql.elements import ColumnClause -from sqlalchemy.sql.elements import quoted_name -from sqlalchemy.sql.elements import TextClause -from sqlalchemy.sql.elements import UnaryExpression -from sqlalchemy.sql.visitors import traverse -from typing_extensions import TypeGuard - -if TYPE_CHECKING: - from sqlalchemy import ClauseElement - from sqlalchemy import Index - from sqlalchemy import Table - from sqlalchemy.engine import Connection - from sqlalchemy.engine import Dialect - from sqlalchemy.engine import Transaction - from sqlalchemy.engine.reflection import Inspector - from sqlalchemy.sql.base import ColumnCollection - from sqlalchemy.sql.compiler import SQLCompiler - from sqlalchemy.sql.dml import Insert - from sqlalchemy.sql.elements import ColumnElement - from sqlalchemy.sql.schema import Constraint - from sqlalchemy.sql.schema import SchemaItem - from sqlalchemy.sql.selectable import Select - from sqlalchemy.sql.selectable import TableClause - -_CE = TypeVar("_CE", bound=Union["ColumnElement[Any]", "SchemaItem"]) - - -class _CompilerProtocol(Protocol): - def __call__(self, element: Any, compiler: Any, **kw: Any) -> str: ... - - -def _safe_int(value: str) -> Union[int, str]: - try: - return int(value) - except: - return value - - -_vers = tuple( - [_safe_int(x) for x in re.findall(r"(\d+|[abc]\d)", __version__)] -) -sqla_13 = _vers >= (1, 3) -sqla_14 = _vers >= (1, 4) -# https://docs.sqlalchemy.org/en/latest/changelog/changelog_14.html#change-0c6e0cc67dfe6fac5164720e57ef307d -sqla_14_18 = _vers >= (1, 4, 18) -sqla_14_26 = _vers >= (1, 4, 26) -sqla_2 = _vers >= (2,) -sqlalchemy_version = __version__ - -try: - from sqlalchemy.sql.naming import _NONE_NAME as _NONE_NAME # type: ignore[attr-defined] # noqa: E501 -except ImportError: - from sqlalchemy.sql.elements import _NONE_NAME as _NONE_NAME # type: ignore # noqa: E501 - - -class _Unsupported: - "Placeholder for unsupported SQLAlchemy classes" - - -if TYPE_CHECKING: - - def compiles( - element: Type[ClauseElement], *dialects: str - ) -> Callable[[_CompilerProtocol], _CompilerProtocol]: ... 
- -else: - from sqlalchemy.ext.compiler import compiles - -try: - from sqlalchemy import Computed as Computed -except ImportError: - if not TYPE_CHECKING: - - class Computed(_Unsupported): - pass - - has_computed = False - has_computed_reflection = False -else: - has_computed = True - has_computed_reflection = _vers >= (1, 3, 16) - -try: - from sqlalchemy import Identity as Identity -except ImportError: - if not TYPE_CHECKING: - - class Identity(_Unsupported): - pass - - has_identity = False -else: - identity_has_dialect_kwargs = issubclass(Identity, DialectKWArgs) - - def _get_identity_options_dict( - identity: Union[Identity, schema.Sequence, None], - dialect_kwargs: bool = False, - ) -> Dict[str, Any]: - if identity is None: - return {} - elif identity_has_dialect_kwargs: - as_dict = identity._as_dict() # type: ignore - if dialect_kwargs: - assert isinstance(identity, DialectKWArgs) - as_dict.update(identity.dialect_kwargs) - else: - as_dict = {} - if isinstance(identity, Identity): - # always=None means something different than always=False - as_dict["always"] = identity.always - if identity.on_null is not None: - as_dict["on_null"] = identity.on_null - # attributes common to Identity and Sequence - attrs = ( - "start", - "increment", - "minvalue", - "maxvalue", - "nominvalue", - "nomaxvalue", - "cycle", - "cache", - "order", - ) - as_dict.update( - { - key: getattr(identity, key, None) - for key in attrs - if getattr(identity, key, None) is not None - } - ) - return as_dict - - has_identity = True - -if sqla_2: - from sqlalchemy.sql.base import _NoneName -else: - from sqlalchemy.util import symbol as _NoneName # type: ignore[assignment] - - -_ConstraintName = Union[None, str, _NoneName] - -_ConstraintNameDefined = Union[str, _NoneName] - - -def constraint_name_defined( - name: _ConstraintName, -) -> TypeGuard[_ConstraintNameDefined]: - return name is _NONE_NAME or isinstance(name, (str, _NoneName)) - - -def constraint_name_string( - name: _ConstraintName, -) -> TypeGuard[str]: - return isinstance(name, str) - - -def constraint_name_or_none( - name: _ConstraintName, -) -> Optional[str]: - return name if constraint_name_string(name) else None - - -AUTOINCREMENT_DEFAULT = "auto" - - -@contextlib.contextmanager -def _ensure_scope_for_ddl( - connection: Optional[Connection], -) -> Iterator[None]: - try: - in_transaction = connection.in_transaction # type: ignore[union-attr] - except AttributeError: - # catch for MockConnection, None - in_transaction = None - pass - - # yield outside the catch - if in_transaction is None: - yield - else: - if not in_transaction(): - assert connection is not None - with connection.begin(): - yield - else: - yield - - -def url_render_as_string(url, hide_password=True): - if sqla_14: - return url.render_as_string(hide_password=hide_password) - else: - return url.__to_string__(hide_password=hide_password) - - -def _safe_begin_connection_transaction( - connection: Connection, -) -> Transaction: - transaction = _get_connection_transaction(connection) - if transaction: - return transaction - else: - return connection.begin() - - -def _safe_commit_connection_transaction( - connection: Connection, -) -> None: - transaction = _get_connection_transaction(connection) - if transaction: - transaction.commit() - - -def _safe_rollback_connection_transaction( - connection: Connection, -) -> None: - transaction = _get_connection_transaction(connection) - if transaction: - transaction.rollback() - - -def _get_connection_in_transaction(connection: Optional[Connection]) -> 
bool: - try: - in_transaction = connection.in_transaction # type: ignore - except AttributeError: - # catch for MockConnection - return False - else: - return in_transaction() - - -def _idx_table_bound_expressions(idx: Index) -> Iterable[ColumnElement[Any]]: - return idx.expressions # type: ignore - - -def _copy(schema_item: _CE, **kw) -> _CE: - if hasattr(schema_item, "_copy"): - return schema_item._copy(**kw) - else: - return schema_item.copy(**kw) # type: ignore[union-attr] - - -def _get_connection_transaction( - connection: Connection, -) -> Optional[Transaction]: - if sqla_14: - return connection.get_transaction() - else: - r = connection._root # type: ignore[attr-defined] - return r._Connection__transaction - - -def _create_url(*arg, **kw) -> url.URL: - if hasattr(url.URL, "create"): - return url.URL.create(*arg, **kw) - else: - return url.URL(*arg, **kw) - - -def _connectable_has_table( - connectable: Connection, tablename: str, schemaname: Union[str, None] -) -> bool: - if sqla_14: - return inspect(connectable).has_table(tablename, schemaname) - else: - return connectable.dialect.has_table( - connectable, tablename, schemaname - ) - - -def _exec_on_inspector(inspector, statement, **params): - if sqla_14: - with inspector._operation_context() as conn: - return conn.execute(statement, params) - else: - return inspector.bind.execute(statement, params) - - -def _nullability_might_be_unset(metadata_column): - if not sqla_14: - return metadata_column.nullable - else: - from sqlalchemy.sql import schema - - return ( - metadata_column._user_defined_nullable is schema.NULL_UNSPECIFIED - ) - - -def _server_default_is_computed(*server_default) -> bool: - if not has_computed: - return False - else: - return any(isinstance(sd, Computed) for sd in server_default) - - -def _server_default_is_identity(*server_default) -> bool: - if not sqla_14: - return False - else: - return any(isinstance(sd, Identity) for sd in server_default) - - -def _table_for_constraint(constraint: Constraint) -> Table: - if isinstance(constraint, ForeignKeyConstraint): - table = constraint.parent - assert table is not None - return table # type: ignore[return-value] - else: - return constraint.table - - -def _columns_for_constraint(constraint): - if isinstance(constraint, ForeignKeyConstraint): - return [fk.parent for fk in constraint.elements] - elif isinstance(constraint, CheckConstraint): - return _find_columns(constraint.sqltext) - else: - return list(constraint.columns) - - -def _reflect_table(inspector: Inspector, table: Table) -> None: - if sqla_14: - return inspector.reflect_table(table, None) - else: - return inspector.reflecttable( # type: ignore[attr-defined] - table, None - ) - - -def _resolve_for_variant(type_, dialect): - if _type_has_variants(type_): - base_type, mapping = _get_variant_mapping(type_) - return mapping.get(dialect.name, base_type) - else: - return type_ - - -if hasattr(sqltypes.TypeEngine, "_variant_mapping"): - - def _type_has_variants(type_): - return bool(type_._variant_mapping) - - def _get_variant_mapping(type_): - return type_, type_._variant_mapping - -else: - - def _type_has_variants(type_): - return type(type_) is sqltypes.Variant - - def _get_variant_mapping(type_): - return type_.impl, type_.mapping - - -def _fk_spec(constraint: ForeignKeyConstraint) -> Any: - if TYPE_CHECKING: - assert constraint.columns is not None - assert constraint.elements is not None - assert isinstance(constraint.parent, Table) - - source_columns = [ - constraint.columns[key].name for key in 
constraint.column_keys - ] - - source_table = constraint.parent.name - source_schema = constraint.parent.schema - target_schema = constraint.elements[0].column.table.schema - target_table = constraint.elements[0].column.table.name - target_columns = [element.column.name for element in constraint.elements] - ondelete = constraint.ondelete - onupdate = constraint.onupdate - deferrable = constraint.deferrable - initially = constraint.initially - return ( - source_schema, - source_table, - source_columns, - target_schema, - target_table, - target_columns, - onupdate, - ondelete, - deferrable, - initially, - ) - - -def _fk_is_self_referential(constraint: ForeignKeyConstraint) -> bool: - spec = constraint.elements[0]._get_colspec() - tokens = spec.split(".") - tokens.pop(-1) # colname - tablekey = ".".join(tokens) - assert constraint.parent is not None - return tablekey == constraint.parent.key - - -def _is_type_bound(constraint: Constraint) -> bool: - # this deals with SQLAlchemy #3260, don't copy CHECK constraints - # that will be generated by the type. - # new feature added for #3260 - return constraint._type_bound - - -def _find_columns(clause): - """locate Column objects within the given expression.""" - - cols: Set[ColumnElement[Any]] = set() - traverse(clause, {}, {"column": cols.add}) - return cols - - -def _remove_column_from_collection( - collection: ColumnCollection, column: Union[Column[Any], ColumnClause[Any]] -) -> None: - """remove a column from a ColumnCollection.""" - - # workaround for older SQLAlchemy, remove the - # same object that's present - assert column.key is not None - to_remove = collection[column.key] - - # SQLAlchemy 2.0 will use more ReadOnlyColumnCollection - # (renamed from ImmutableColumnCollection) - if hasattr(collection, "_immutable") or hasattr(collection, "_readonly"): - collection._parent.remove(to_remove) - else: - collection.remove(to_remove) - - -def _textual_index_column( - table: Table, text_: Union[str, TextClause, ColumnElement[Any]] -) -> Union[ColumnElement[Any], Column[Any]]: - """a workaround for the Index construct's severe lack of flexibility""" - if isinstance(text_, str): - c = Column(text_, sqltypes.NULLTYPE) - table.append_column(c) - return c - elif isinstance(text_, TextClause): - return _textual_index_element(table, text_) - elif isinstance(text_, _textual_index_element): - return _textual_index_column(table, text_.text) - elif isinstance(text_, sql.ColumnElement): - return _copy_expression(text_, table) - else: - raise ValueError("String or text() construct expected") - - -def _copy_expression(expression: _CE, target_table: Table) -> _CE: - def replace(col): - if ( - isinstance(col, Column) - and col.table is not None - and col.table is not target_table - ): - if col.name in target_table.c: - return target_table.c[col.name] - else: - c = _copy(col) - target_table.append_column(c) - return c - else: - return None - - return visitors.replacement_traverse( # type: ignore[call-overload] - expression, {}, replace - ) - - -class _textual_index_element(sql.ColumnElement): - """Wrap around a sqlalchemy text() construct in such a way that - we appear like a column-oriented SQL expression to an Index - construct. - - The issue here is that currently the Postgresql dialect, the biggest - recipient of functional indexes, keys all the index expressions to - the corresponding column expressions when rendering CREATE INDEX, - so the Index we create here needs to have a .columns collection that - is the same length as the .expressions collection. 
Ultimately - SQLAlchemy should support text() expressions in indexes. - - See SQLAlchemy issue 3174. - - """ - - __visit_name__ = "_textual_idx_element" - - def __init__(self, table: Table, text: TextClause) -> None: - self.table = table - self.text = text - self.key = text.text - self.fake_column = schema.Column(self.text.text, sqltypes.NULLTYPE) - table.append_column(self.fake_column) - - def get_children(self, **kw): - return [self.fake_column] - - -@compiles(_textual_index_element) -def _render_textual_index_column( - element: _textual_index_element, compiler: SQLCompiler, **kw -) -> str: - return compiler.process(element.text, **kw) - - -class _literal_bindparam(BindParameter): - pass - - -@compiles(_literal_bindparam) -def _render_literal_bindparam( - element: _literal_bindparam, compiler: SQLCompiler, **kw -) -> str: - return compiler.render_literal_bindparam(element, **kw) - - -def _column_kwargs(col: Column) -> Mapping: - if sqla_13: - return col.kwargs - else: - return {} - - -def _get_constraint_final_name( - constraint: Union[Index, Constraint], dialect: Optional[Dialect] -) -> Optional[str]: - if constraint.name is None: - return None - assert dialect is not None - if sqla_14: - # for SQLAlchemy 1.4 we would like to have the option to expand - # the use of "deferred" names for constraints as well as to have - # some flexibility with "None" name and similar; make use of new - # SQLAlchemy API to return what would be the final compiled form of - # the name for this dialect. - return dialect.identifier_preparer.format_constraint( - constraint, _alembic_quote=False - ) - else: - # prior to SQLAlchemy 1.4, work around quoting logic to get at the - # final compiled name without quotes. - if hasattr(constraint.name, "quote"): - # might be quoted_name, might be truncated_name, keep it the - # same - quoted_name_cls: type = type(constraint.name) - else: - quoted_name_cls = quoted_name - - new_name = quoted_name_cls(str(constraint.name), quote=False) - constraint = constraint.__class__(name=new_name) - - if isinstance(constraint, schema.Index): - # name should not be quoted. - d = dialect.ddl_compiler(dialect, None) # type: ignore[arg-type] - return d._prepared_index_name(constraint) - else: - # name should not be quoted. 
- return dialect.identifier_preparer.format_constraint(constraint) - - -def _constraint_is_named( - constraint: Union[Constraint, Index], dialect: Optional[Dialect] -) -> bool: - if sqla_14: - if constraint.name is None: - return False - assert dialect is not None - name = dialect.identifier_preparer.format_constraint( - constraint, _alembic_quote=False - ) - return name is not None - else: - return constraint.name is not None - - -def _is_mariadb(mysql_dialect: Dialect) -> bool: - if sqla_14: - return mysql_dialect.is_mariadb # type: ignore[attr-defined] - else: - return bool( - mysql_dialect.server_version_info - and mysql_dialect._is_mariadb # type: ignore[attr-defined] - ) - - -def _mariadb_normalized_version_info(mysql_dialect): - return mysql_dialect._mariadb_normalized_version_info - - -def _insert_inline(table: Union[TableClause, Table]) -> Insert: - if sqla_14: - return table.insert().inline() - else: - return table.insert(inline=True) # type: ignore[call-arg] - - -if sqla_14: - from sqlalchemy import create_mock_engine - - # weird mypy workaround - from sqlalchemy import select as _sa_select - - _select = _sa_select -else: - from sqlalchemy import create_engine - - def create_mock_engine(url, executor, **kw): # type: ignore[misc] - return create_engine( - "postgresql://", strategy="mock", executor=executor - ) - - def _select(*columns, **kw) -> Select: - return sql.select(list(columns), **kw) # type: ignore[call-overload] - - -def is_expression_index(index: Index) -> bool: - for expr in index.expressions: - if is_expression(expr): - return True - return False - - -def is_expression(expr: Any) -> bool: - while isinstance(expr, UnaryExpression): - expr = expr.element - if not isinstance(expr, ColumnClause) or expr.is_literal: - return True - return False diff --git a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/INSTALLER b/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/METADATA b/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/METADATA deleted file mode 100644 index 3ac05cf..0000000 --- a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/METADATA +++ /dev/null @@ -1,295 +0,0 @@ -Metadata-Version: 2.3 -Name: annotated-types -Version: 0.7.0 -Summary: Reusable constraint types to use with typing.Annotated -Project-URL: Homepage, https://github.com/annotated-types/annotated-types -Project-URL: Source, https://github.com/annotated-types/annotated-types -Project-URL: Changelog, https://github.com/annotated-types/annotated-types/releases -Author-email: Adrian Garcia Badaracco <1755071+adriangb@users.noreply.github.com>, Samuel Colvin , Zac Hatfield-Dodds -License-File: LICENSE -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Console -Classifier: Environment :: MacOS X -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: Information Technology -Classifier: License :: OSI Approved :: MIT License -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: Unix -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python 
:: 3.12
-Classifier: Topic :: Software Development :: Libraries :: Python Modules
-Classifier: Typing :: Typed
-Requires-Python: >=3.8
-Requires-Dist: typing-extensions>=4.0.0; python_version < '3.9'
-Description-Content-Type: text/markdown
-
-# annotated-types
-
-[![CI](https://github.com/annotated-types/annotated-types/workflows/CI/badge.svg?event=push)](https://github.com/annotated-types/annotated-types/actions?query=event%3Apush+branch%3Amain+workflow%3ACI)
-[![pypi](https://img.shields.io/pypi/v/annotated-types.svg)](https://pypi.python.org/pypi/annotated-types)
-[![versions](https://img.shields.io/pypi/pyversions/annotated-types.svg)](https://github.com/annotated-types/annotated-types)
-[![license](https://img.shields.io/github/license/annotated-types/annotated-types.svg)](https://github.com/annotated-types/annotated-types/blob/main/LICENSE)
-
-[PEP-593](https://peps.python.org/pep-0593/) added `typing.Annotated` as a way of
-adding context-specific metadata to existing types, and specifies that
-`Annotated[T, x]` _should_ be treated as `T` by any tool or library without special
-logic for `x`.
-
-This package provides metadata objects which can be used to represent common
-constraints such as upper and lower bounds on scalar values and collection sizes,
-a `Predicate` marker for runtime checks, and
-descriptions of how we intend these metadata to be interpreted. In some cases,
-we also note alternative representations which do not require this package.
-
-## Install
-
-```bash
-pip install annotated-types
-```
-
-## Examples
-
-```python
-from typing import Annotated
-from annotated_types import Gt, Len, Predicate
-
-class MyClass:
-    age: Annotated[int, Gt(18)]                          # Valid: 19, 20, ...
-                                                         # Invalid: 17, 18, "19", 19.0, ...
-    factors: list[Annotated[int, Predicate(is_prime)]]   # Valid: 2, 3, 5, 7, 11, ...
-                                                         # Invalid: 4, 8, -2, 5.0, "prime", ...
-
-    my_list: Annotated[list[int], Len(0, 10)]            # Valid: [], [10, 20, 30, 40, 50]
-                                                         # Invalid: (1, 2), ["abc"], [0] * 20
-```
-
-## Documentation
-
-_While `annotated-types` avoids runtime checks for performance, users should not
-construct invalid combinations such as `MultipleOf("non-numeric")` or `Annotated[int, Len(3)]`.
-Downstream implementors may choose to raise an error, emit a warning, silently ignore
-a metadata item, etc., if the metadata objects described below are used with an
-incompatible type - or for any other reason!_
-
-### Gt, Ge, Lt, Le
-
-Express inclusive and/or exclusive bounds on orderable values - which may be numbers,
-dates, times, strings, sets, etc. Note that the boundary value need not be of the
-same type that was annotated, so long as they can be compared: `Annotated[int, Gt(1.5)]`
-is fine, for example, and implies that the value is an integer x such that `x > 1.5`.
-
-We suggest that implementors may also interpret `functools.partial(operator.lt, 1.5)`
-as being equivalent to `Gt(1.5)`, for users who wish to avoid a runtime dependency on
-the `annotated-types` package.
-
-To be explicit, these types have the following meanings:
-
-* `Gt(x)` - value must be "Greater Than" `x` - equivalent to exclusive minimum
-* `Ge(x)` - value must be "Greater than or Equal" to `x` - equivalent to inclusive minimum
-* `Lt(x)` - value must be "Less Than" `x` - equivalent to exclusive maximum
-* `Le(x)` - value must be "Less than or Equal" to `x` - equivalent to inclusive maximum
-
-### Interval
-
-`Interval(gt, ge, lt, le)` allows you to specify an upper and lower bound with a single
-metadata object. `None` attributes should be ignored, and non-`None` attributes
-treated as per the single bounds above.
-
-### MultipleOf
-
-`MultipleOf(multiple_of=x)` might be interpreted in two ways:
-
-1. Python semantics, implying `value % multiple_of == 0`, or
-2. [JSONschema semantics](https://json-schema.org/draft/2020-12/json-schema-validation.html#rfc.section.6.2.1),
-   where `int(value / multiple_of) == value / multiple_of`.
-
-We encourage users to be aware of these two common interpretations and their
-distinct behaviours, especially since very large or non-integer numbers make
-it easy to cause silent data corruption due to floating-point imprecision.
-
-We encourage libraries to carefully document which interpretation they implement.
-
-### MinLen, MaxLen, Len
-
-`Len()` implies that `min_length <= len(value) <= max_length` - lower and upper bounds are inclusive.
-
-As well as `Len()` which can optionally include upper and lower bounds, we also
-provide `MinLen(x)` and `MaxLen(y)` which are equivalent to `Len(min_length=x)`
-and `Len(max_length=y)` respectively.
-
-`Len`, `MinLen`, and `MaxLen` may be used with any type which supports `len(value)`.
-
-Examples of usage:
-
-* `Annotated[list, MaxLen(10)]` (or `Annotated[list, Len(max_length=10)]`) - list must have a length of 10 or less
-* `Annotated[str, MaxLen(10)]` - string must have a length of 10 or less
-* `Annotated[list, MinLen(3)]` (or `Annotated[list, Len(min_length=3)]`) - list must have a length of 3 or more
-* `Annotated[list, Len(4, 6)]` - list must have a length of 4, 5, or 6
-* `Annotated[list, Len(8, 8)]` - list must have a length of exactly 8
-
-#### Changed in v0.4.0
-
-* `min_inclusive` has been renamed to `min_length`, no change in meaning
-* `max_exclusive` has been renamed to `max_length`, upper bound is now **inclusive** instead of **exclusive**
-* The recommendation that slices are interpreted as `Len` has been removed due to ambiguity and different semantic
-  meaning of the upper bound in slices vs. `Len`
-
-See [issue #23](https://github.com/annotated-types/annotated-types/issues/23) for discussion.
-
-### Timezone
-
-`Timezone` can be used with a `datetime` or a `time` to express which timezones
-are allowed. `Annotated[datetime, Timezone(None)]` must be a naive datetime.
-`Timezone[...]` ([literal ellipsis](https://docs.python.org/3/library/constants.html#Ellipsis))
-expresses that any timezone-aware datetime is allowed. You may also pass a specific
-timezone string or [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects)
-object such as `Timezone(timezone.utc)` or `Timezone("Africa/Abidjan")` to express that you only
-allow a specific timezone, though we note that this is often a symptom of fragile design.
-
-#### Changed in v0.x.x
-
-* `Timezone` accepts [`tzinfo`](https://docs.python.org/3/library/datetime.html#tzinfo-objects) objects instead of
-  `timezone`, extending compatibility to [`zoneinfo`](https://docs.python.org/3/library/zoneinfo.html) and third party libraries.
-
-### Unit
-
-`Unit(unit: str)` expresses that the annotated numeric value is the magnitude of
-a quantity with the specified unit. For example, `Annotated[float, Unit("m/s")]`
-would be a float representing a velocity in meters per second.
-
-Please note that `annotated_types` itself makes no attempt to parse or validate
-the unit string in any way. That is left entirely to downstream libraries,
-such as [`pint`](https://pint.readthedocs.io) or
-[`astropy.units`](https://docs.astropy.org/en/stable/units/).
-
-An example of how a library might use this metadata:
-
-```python
-from annotated_types import Unit
-from typing import Annotated, TypeVar, Callable, Any, get_origin, get_args
-
-# given a type annotated with a unit:
-Meters = Annotated[float, Unit("m")]
-
-
-# you can cast the annotation to a specific unit type with any
-# callable that accepts a string and returns the desired type
-T = TypeVar("T")
-def cast_unit(tp: Any, unit_cls: Callable[[str], T]) -> T | None:
-    if get_origin(tp) is Annotated:
-        for arg in get_args(tp):
-            if isinstance(arg, Unit):
-                return unit_cls(arg.unit)
-    return None
-
-
-# using `pint`
-import pint
-pint_unit = cast_unit(Meters, pint.Unit)
-
-
-# using `astropy.units`
-import astropy.units as u
-astropy_unit = cast_unit(Meters, u.Unit)
-```
-
-### Predicate
-
-`Predicate(func: Callable)` expresses that `func(value)` is truthy for valid values.
-Users should prefer the statically inspectable metadata above, but if you need
-the full power and flexibility of arbitrary runtime predicates... here it is.
-
-For some common constraints, we provide generic types:
-
-* `IsLower = Annotated[T, Predicate(str.islower)]`
-* `IsUpper = Annotated[T, Predicate(str.isupper)]`
-* `IsDigit = Annotated[T, Predicate(str.isdigit)]`
-* `IsFinite = Annotated[T, Predicate(math.isfinite)]`
-* `IsNotFinite = Annotated[T, Predicate(Not(math.isfinite))]`
-* `IsNan = Annotated[T, Predicate(math.isnan)]`
-* `IsNotNan = Annotated[T, Predicate(Not(math.isnan))]`
-* `IsInfinite = Annotated[T, Predicate(math.isinf)]`
-* `IsNotInfinite = Annotated[T, Predicate(Not(math.isinf))]`
-
-so that you can write e.g. `x: IsFinite[float] = 2.0` instead of the longer
-(but exactly equivalent) `x: Annotated[float, Predicate(math.isfinite)] = 2.0`.
-
-Some libraries might have special logic to handle known or understandable predicates,
-for example by checking for `str.isdigit` and using its presence to both call custom
-logic to enforce digit-only strings, and customise some generated external schema.
-Users are therefore encouraged to avoid indirection like `lambda s: s.lower()`, in
-favor of introspectable methods such as `str.lower` or `re.compile("pattern").search`.
-
-To enable basic negation of commonly used predicates like `math.isnan` without
-introducing indirection that makes it impossible for implementers to introspect the
-predicate, we provide a `Not` wrapper that simply negates the predicate in an
-introspectable manner. Several of the predicates listed above are created in this manner.
-
-We do not specify what behaviour should be expected for predicates that raise
-an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently
-skip invalid constraints, or statically raise an error; or it might try calling it
-and then propagate or discard the resulting
-`TypeError: descriptor 'isdigit' for 'str' objects doesn't apply to a 'int' object`
-exception. We encourage libraries to document the behaviour they choose.
-
-### Doc
-
-`doc()` can be used to add documentation information in `Annotated`, for function and method parameters, variables, class attributes, return types, and any place where `Annotated` can be used.
-
-It expects a value that can be statically analyzed, as the main use case is for static analysis, editors, documentation generators, and similar tools.
-
-It returns a `DocInfo` class with a single attribute `documentation` containing the value passed to `doc()`.
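A minimal sketch of how a tool might read that `documentation` attribute back at runtime, following the same `get_origin`/`get_args` pattern as the `cast_unit` example above; the `describe` helper and `UserId` alias here are illustrative, not part of the package:

```python
from typing import Annotated, Optional, get_args, get_origin

from annotated_types import DocInfo, doc

UserId = Annotated[int, doc("Opaque identifier assigned at signup")]

def describe(tp: object) -> Optional[str]:
    # get_args(Annotated[T, x, ...]) returns (T, x, ...), so the
    # metadata objects start at index 1
    if get_origin(tp) is Annotated:
        for arg in get_args(tp)[1:]:
            if isinstance(arg, DocInfo):
                return arg.documentation
    return None

print(describe(UserId))  # Opaque identifier assigned at signup
```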
-
-This is the early adopter's alternative form of the [`typing-doc` proposal](https://github.com/tiangolo/fastapi/blob/typing-doc/typing_doc.md).
-
-### Integrating downstream types with `GroupedMetadata`
-
-Implementers may choose to provide a convenience wrapper that groups multiple pieces of metadata.
-This can help reduce verbosity and cognitive overhead for users.
-For example, an implementer like Pydantic might provide a `Field` or `Meta` type that accepts keyword arguments and transforms these into low-level metadata:
-
-```python
-from dataclasses import dataclass
-from typing import Iterator
-from annotated_types import GroupedMetadata, Ge
-
-@dataclass
-class Field(GroupedMetadata):
-    ge: int | None = None
-    description: str | None = None
-
-    def __iter__(self) -> Iterator[object]:
-        # Iterating over a GroupedMetadata object should yield annotated-types
-        # constraint metadata objects which describe it as fully as possible,
-        # and may include other unknown objects too.
-        if self.ge is not None:
-            yield Ge(self.ge)
-        if self.description is not None:
-            yield Description(self.description)
-```
-
-Libraries consuming annotated-types constraints should check for `GroupedMetadata` and unpack it by iterating over the object and treating the results as if they had been "unpacked" in the `Annotated` type. The same logic should be applied to the [PEP 646 `Unpack` type](https://peps.python.org/pep-0646/), so that `Annotated[T, Field(...)]`, `Annotated[T, Unpack[Field(...)]]` and `Annotated[T, *Field(...)]` are all treated consistently.
-
-Libraries consuming annotated-types should also ignore any metadata they do not recognize that came from unpacking a `GroupedMetadata`, just like they ignore unrecognized metadata in `Annotated` itself.
-
-Our own `annotated_types.Interval` class is a `GroupedMetadata` which unpacks itself into `Gt`, `Lt`, etc., so this is not an abstract concern. Similarly, `annotated_types.Len` is a `GroupedMetadata` which unpacks itself into `MinLen` (optionally) and `MaxLen`.
-
-### Consuming metadata
-
-We intend to not be prescriptive as to _how_ the metadata and constraints are used, but as an example of how one might parse constraints from type annotations see our [implementation in `test_main.py`](https://github.com/annotated-types/annotated-types/blob/f59cf6d1b5255a0fe359b93896759a180bec30ae/tests/test_main.py#L94-L103).
-
-It is up to the implementer to determine how this metadata is used.
-You could use the metadata for runtime type checking, for generating schemas or to generate example data, amongst other use cases.
-
-## Design & History
-
-This package was designed at the PyCon 2022 sprints by the maintainers of Pydantic
-and Hypothesis, with the goal of making it as easy as possible for end-users to
-provide more informative annotations for use by runtime libraries.
-
-It is deliberately minimal, and following PEP-593 allows considerable downstream
-discretion in what (if anything!) they choose to support. Nonetheless, we expect
-that staying simple and covering _only_ the most common use-cases will give users
-and maintainers the best experience we can. If you'd like more constraints for your
-types - follow our lead, by defining them and documenting them downstream!
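The "Consuming metadata" section above leaves the flattening of `GroupedMetadata` abstract. A minimal sketch of one way a consumer might implement it, using only the public `annotated_types` API (the `collect_constraints` helper and `Age` alias are illustrative, not part of the package):

```python
from typing import Annotated, get_args, get_origin

from annotated_types import BaseMetadata, GroupedMetadata, Interval

def collect_constraints(tp: object) -> list[BaseMetadata]:
    """Flatten Annotated metadata, unpacking any GroupedMetadata it contains."""
    found: list[BaseMetadata] = []
    if get_origin(tp) is not Annotated:
        return found
    for arg in get_args(tp)[1:]:
        if isinstance(arg, GroupedMetadata):
            # e.g. Interval(gt=0, le=130) unpacks into Gt(0) and Le(130)
            found.extend(a for a in arg if isinstance(a, BaseMetadata))
        elif isinstance(arg, BaseMetadata):
            found.append(arg)
        # anything else is unrecognized metadata and is deliberately ignored
    return found

Age = Annotated[int, Interval(gt=0, le=130)]
print(collect_constraints(Age))  # [Gt(gt=0), Le(le=130)]
```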
diff --git a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/RECORD b/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/RECORD deleted file mode 100644 index 7045729..0000000 --- a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/RECORD +++ /dev/null @@ -1,10 +0,0 @@ -annotated_types-0.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -annotated_types-0.7.0.dist-info/METADATA,sha256=7ltqxksJJ0wCYFGBNIQCWTlWQGeAH0hRFdnK3CB895E,15046 -annotated_types-0.7.0.dist-info/RECORD,, -annotated_types-0.7.0.dist-info/WHEEL,sha256=zEMcRr9Kr03x1ozGwg5v9NQBKn3kndp6LSoSlVg-jhU,87 -annotated_types-0.7.0.dist-info/licenses/LICENSE,sha256=_hBJiEsaDZNCkB6I4H8ykl0ksxIdmXK2poBfuYJLCV0,1083 -annotated_types/__init__.py,sha256=RynLsRKUEGI0KimXydlD1fZEfEzWwDo0Uon3zOKhG1Q,13819 -annotated_types/__pycache__/__init__.cpython-312.pyc,, -annotated_types/__pycache__/test_cases.cpython-312.pyc,, -annotated_types/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -annotated_types/test_cases.py,sha256=zHFX6EpcMbGJ8FzBYDbO56bPwx_DYIVSKbZM-4B3_lg,6421 diff --git a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/WHEEL b/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/WHEEL deleted file mode 100644 index 516596c..0000000 --- a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.24.2 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE b/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE deleted file mode 100644 index d99323a..0000000 --- a/myenv/Lib/site-packages/annotated_types-0.7.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2022 the contributors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/myenv/Lib/site-packages/annotated_types/__init__.py b/myenv/Lib/site-packages/annotated_types/__init__.py deleted file mode 100644 index 74e0dee..0000000 --- a/myenv/Lib/site-packages/annotated_types/__init__.py +++ /dev/null @@ -1,432 +0,0 @@ -import math -import sys -import types -from dataclasses import dataclass -from datetime import tzinfo -from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, SupportsFloat, SupportsIndex, TypeVar, Union - -if sys.version_info < (3, 8): - from typing_extensions import Protocol, runtime_checkable -else: - from typing import Protocol, runtime_checkable - -if sys.version_info < (3, 9): - from typing_extensions import Annotated, Literal -else: - from typing import Annotated, Literal - -if sys.version_info < (3, 10): - EllipsisType = type(Ellipsis) - KW_ONLY = {} - SLOTS = {} -else: - from types import EllipsisType - - KW_ONLY = {"kw_only": True} - SLOTS = {"slots": True} - - -__all__ = ( - 'BaseMetadata', - 'GroupedMetadata', - 'Gt', - 'Ge', - 'Lt', - 'Le', - 'Interval', - 'MultipleOf', - 'MinLen', - 'MaxLen', - 'Len', - 'Timezone', - 'Predicate', - 'LowerCase', - 'UpperCase', - 'IsDigits', - 'IsFinite', - 'IsNotFinite', - 'IsNan', - 'IsNotNan', - 'IsInfinite', - 'IsNotInfinite', - 'doc', - 'DocInfo', - '__version__', -) - -__version__ = '0.7.0' - - -T = TypeVar('T') - - -# arguments that start with __ are considered -# positional only -# see https://peps.python.org/pep-0484/#positional-only-arguments - - -class SupportsGt(Protocol): - def __gt__(self: T, __other: T) -> bool: - ... - - -class SupportsGe(Protocol): - def __ge__(self: T, __other: T) -> bool: - ... - - -class SupportsLt(Protocol): - def __lt__(self: T, __other: T) -> bool: - ... - - -class SupportsLe(Protocol): - def __le__(self: T, __other: T) -> bool: - ... - - -class SupportsMod(Protocol): - def __mod__(self: T, __other: T) -> T: - ... - - -class SupportsDiv(Protocol): - def __div__(self: T, __other: T) -> T: - ... - - -class BaseMetadata: - """Base class for all metadata. - - This exists mainly so that implementers - can do `isinstance(..., BaseMetadata)` while traversing field annotations. - """ - - __slots__ = () - - -@dataclass(frozen=True, **SLOTS) -class Gt(BaseMetadata): - """Gt(gt=x) implies that the value must be greater than x. - - It can be used with any type that supports the ``>`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - gt: SupportsGt - - -@dataclass(frozen=True, **SLOTS) -class Ge(BaseMetadata): - """Ge(ge=x) implies that the value must be greater than or equal to x. - - It can be used with any type that supports the ``>=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - ge: SupportsGe - - -@dataclass(frozen=True, **SLOTS) -class Lt(BaseMetadata): - """Lt(lt=x) implies that the value must be less than x. - - It can be used with any type that supports the ``<`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - lt: SupportsLt - - -@dataclass(frozen=True, **SLOTS) -class Le(BaseMetadata): - """Le(le=x) implies that the value must be less than or equal to x. - - It can be used with any type that supports the ``<=`` operator, - including numbers, dates and times, strings, sets, and so on. - """ - - le: SupportsLe - - -@runtime_checkable -class GroupedMetadata(Protocol): - """A grouping of multiple objects, like typing.Unpack. - - `GroupedMetadata` on its own is not metadata and has no meaning. 
-    All of the constraints and metadata should be fully expressible
-    in terms of the `BaseMetadata`'s returned by `GroupedMetadata.__iter__()`.
-
-    Concrete implementations should override `GroupedMetadata.__iter__()`
-    to add their own metadata.
-    For example:
-
-    >>> @dataclass
-    >>> class Field(GroupedMetadata):
-    >>>     gt: float | None = None
-    >>>     description: str | None = None
-    ...
-    >>>     def __iter__(self) -> Iterable[object]:
-    >>>         if self.gt is not None:
-    >>>             yield Gt(self.gt)
-    >>>         if self.description is not None:
-    >>>             yield Description(self.description)
-
-    Also see the implementation of `Interval` below for an example.
-
-    Parsers should recognize this and unpack it so that it can be used
-    both with and without unpacking:
-
-    - `Annotated[int, Field(...)]` (parser must unpack Field)
-    - `Annotated[int, *Field(...)]` (PEP-646)
-    """  # noqa: trailing-whitespace
-
-    @property
-    def __is_annotated_types_grouped_metadata__(self) -> Literal[True]:
-        return True
-
-    def __iter__(self) -> Iterator[object]:
-        ...
-
-    if not TYPE_CHECKING:
-        __slots__ = ()  # allow subclasses to use slots
-
-        def __init_subclass__(cls, *args: Any, **kwargs: Any) -> None:
-            # Basic ABC like functionality without the complexity of an ABC
-            super().__init_subclass__(*args, **kwargs)
-            if cls.__iter__ is GroupedMetadata.__iter__:
-                raise TypeError("Can't subclass GroupedMetadata without implementing __iter__")
-
-        def __iter__(self) -> Iterator[object]:  # noqa: F811
-            raise NotImplementedError  # more helpful than "None has no attribute..." type errors
-
-
-@dataclass(frozen=True, **KW_ONLY, **SLOTS)
-class Interval(GroupedMetadata):
-    """Interval can express inclusive or exclusive bounds with a single object.
-
-    It accepts keyword arguments ``gt``, ``ge``, ``lt``, and/or ``le``, which
-    are interpreted the same way as the single-bound constraints.
-    """
-
-    gt: Union[SupportsGt, None] = None
-    ge: Union[SupportsGe, None] = None
-    lt: Union[SupportsLt, None] = None
-    le: Union[SupportsLe, None] = None
-
-    def __iter__(self) -> Iterator[BaseMetadata]:
-        """Unpack an Interval into zero or more single-bounds."""
-        if self.gt is not None:
-            yield Gt(self.gt)
-        if self.ge is not None:
-            yield Ge(self.ge)
-        if self.lt is not None:
-            yield Lt(self.lt)
-        if self.le is not None:
-            yield Le(self.le)
-
-
-@dataclass(frozen=True, **SLOTS)
-class MultipleOf(BaseMetadata):
-    """MultipleOf(multiple_of=x) might be interpreted in two ways:
-
-    1. Python semantics, implying ``value % multiple_of == 0``, or
-    2. JSONschema semantics, where ``int(value / multiple_of) == value / multiple_of``
-
-    We encourage users to be aware of these two common interpretations,
-    and libraries to carefully document which they implement.
-    """
-
-    multiple_of: Union[SupportsDiv, SupportsMod]
-
-
-@dataclass(frozen=True, **SLOTS)
-class MinLen(BaseMetadata):
-    """
-    MinLen() implies minimum inclusive length,
-    e.g. ``len(value) >= min_length``.
-    """
-
-    min_length: Annotated[int, Ge(0)]
-
-
-@dataclass(frozen=True, **SLOTS)
-class MaxLen(BaseMetadata):
-    """
-    MaxLen() implies maximum inclusive length,
-    e.g. ``len(value) <= max_length``.
-    """
-
-    max_length: Annotated[int, Ge(0)]
-
-
-@dataclass(frozen=True, **SLOTS)
-class Len(GroupedMetadata):
-    """
-    Len() implies that ``min_length <= len(value) <= max_length``.
-
-    Upper bound may be omitted or ``None`` to indicate no upper length bound.
- """ - - min_length: Annotated[int, Ge(0)] = 0 - max_length: Optional[Annotated[int, Ge(0)]] = None - - def __iter__(self) -> Iterator[BaseMetadata]: - """Unpack a Len into zone or more single-bounds.""" - if self.min_length > 0: - yield MinLen(self.min_length) - if self.max_length is not None: - yield MaxLen(self.max_length) - - -@dataclass(frozen=True, **SLOTS) -class Timezone(BaseMetadata): - """Timezone(tz=...) requires a datetime to be aware (or ``tz=None``, naive). - - ``Annotated[datetime, Timezone(None)]`` must be a naive datetime. - ``Timezone[...]`` (the ellipsis literal) expresses that the datetime must be - tz-aware but any timezone is allowed. - - You may also pass a specific timezone string or tzinfo object such as - ``Timezone(timezone.utc)`` or ``Timezone("Africa/Abidjan")`` to express that - you only allow a specific timezone, though we note that this is often - a symptom of poor design. - """ - - tz: Union[str, tzinfo, EllipsisType, None] - - -@dataclass(frozen=True, **SLOTS) -class Unit(BaseMetadata): - """Indicates that the value is a physical quantity with the specified unit. - - It is intended for usage with numeric types, where the value represents the - magnitude of the quantity. For example, ``distance: Annotated[float, Unit('m')]`` - or ``speed: Annotated[float, Unit('m/s')]``. - - Interpretation of the unit string is left to the discretion of the consumer. - It is suggested to follow conventions established by python libraries that work - with physical quantities, such as - - - ``pint`` : - - ``astropy.units``: - - For indicating a quantity with a certain dimensionality but without a specific unit - it is recommended to use square brackets, e.g. `Annotated[float, Unit('[time]')]`. - Note, however, ``annotated_types`` itself makes no use of the unit string. - """ - - unit: str - - -@dataclass(frozen=True, **SLOTS) -class Predicate(BaseMetadata): - """``Predicate(func: Callable)`` implies `func(value)` is truthy for valid values. - - Users should prefer statically inspectable metadata, but if you need the full - power and flexibility of arbitrary runtime predicates... here it is. - - We provide a few predefined predicates for common string constraints: - ``IsLower = Predicate(str.islower)``, ``IsUpper = Predicate(str.isupper)``, and - ``IsDigits = Predicate(str.isdigit)``. Users are encouraged to use methods which - can be given special handling, and avoid indirection like ``lambda s: s.lower()``. - - Some libraries might have special logic to handle certain predicates, e.g. by - checking for `str.isdigit` and using its presence to both call custom logic to - enforce digit-only strings, and customise some generated external schema. - - We do not specify what behaviour should be expected for predicates that raise - an exception. For example `Annotated[int, Predicate(str.isdigit)]` might silently - skip invalid constraints, or statically raise an error; or it might try calling it - and then propagate or discard the resulting exception. 
- """ - - func: Callable[[Any], bool] - - def __repr__(self) -> str: - if getattr(self.func, "__name__", "") == "": - return f"{self.__class__.__name__}({self.func!r})" - if isinstance(self.func, (types.MethodType, types.BuiltinMethodType)) and ( - namespace := getattr(self.func.__self__, "__name__", None) - ): - return f"{self.__class__.__name__}({namespace}.{self.func.__name__})" - if isinstance(self.func, type(str.isascii)): # method descriptor - return f"{self.__class__.__name__}({self.func.__qualname__})" - return f"{self.__class__.__name__}({self.func.__name__})" - - -@dataclass -class Not: - func: Callable[[Any], bool] - - def __call__(self, __v: Any) -> bool: - return not self.func(__v) - - -_StrType = TypeVar("_StrType", bound=str) - -LowerCase = Annotated[_StrType, Predicate(str.islower)] -""" -Return True if the string is a lowercase string, False otherwise. - -A string is lowercase if all cased characters in the string are lowercase and there is at least one cased character in the string. -""" # noqa: E501 -UpperCase = Annotated[_StrType, Predicate(str.isupper)] -""" -Return True if the string is an uppercase string, False otherwise. - -A string is uppercase if all cased characters in the string are uppercase and there is at least one cased character in the string. -""" # noqa: E501 -IsDigit = Annotated[_StrType, Predicate(str.isdigit)] -IsDigits = IsDigit # type: ignore # plural for backwards compatibility, see #63 -""" -Return True if the string is a digit string, False otherwise. - -A string is a digit string if all characters in the string are digits and there is at least one character in the string. -""" # noqa: E501 -IsAscii = Annotated[_StrType, Predicate(str.isascii)] -""" -Return True if all characters in the string are ASCII, False otherwise. - -ASCII characters have code points in the range U+0000-U+007F. Empty string is ASCII too. -""" - -_NumericType = TypeVar('_NumericType', bound=Union[SupportsFloat, SupportsIndex]) -IsFinite = Annotated[_NumericType, Predicate(math.isfinite)] -"""Return True if x is neither an infinity nor a NaN, and False otherwise.""" -IsNotFinite = Annotated[_NumericType, Predicate(Not(math.isfinite))] -"""Return True if x is one of infinity or NaN, and False otherwise""" -IsNan = Annotated[_NumericType, Predicate(math.isnan)] -"""Return True if x is a NaN (not a number), and False otherwise.""" -IsNotNan = Annotated[_NumericType, Predicate(Not(math.isnan))] -"""Return True if x is anything but NaN (not a number), and False otherwise.""" -IsInfinite = Annotated[_NumericType, Predicate(math.isinf)] -"""Return True if x is a positive or negative infinity, and False otherwise.""" -IsNotInfinite = Annotated[_NumericType, Predicate(Not(math.isinf))] -"""Return True if x is neither a positive or negative infinity, and False otherwise.""" - -try: - from typing_extensions import DocInfo, doc # type: ignore [attr-defined] -except ImportError: - - @dataclass(frozen=True, **SLOTS) - class DocInfo: # type: ignore [no-redef] - """ " - The return value of doc(), mainly to be used by tools that want to extract the - Annotated documentation at runtime. - """ - - documentation: str - """The documentation string passed to doc().""" - - def doc( - documentation: str, - ) -> DocInfo: - """ - Add documentation to a type annotation inside of Annotated. - - For example: - - >>> def hi(name: Annotated[int, doc("The name of the user")]) -> None: ... 
- """ - return DocInfo(documentation) diff --git a/myenv/Lib/site-packages/annotated_types/py.typed b/myenv/Lib/site-packages/annotated_types/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/annotated_types/test_cases.py b/myenv/Lib/site-packages/annotated_types/test_cases.py deleted file mode 100644 index d9164d6..0000000 --- a/myenv/Lib/site-packages/annotated_types/test_cases.py +++ /dev/null @@ -1,151 +0,0 @@ -import math -import sys -from datetime import date, datetime, timedelta, timezone -from decimal import Decimal -from typing import Any, Dict, Iterable, Iterator, List, NamedTuple, Set, Tuple - -if sys.version_info < (3, 9): - from typing_extensions import Annotated -else: - from typing import Annotated - -import annotated_types as at - - -class Case(NamedTuple): - """ - A test case for `annotated_types`. - """ - - annotation: Any - valid_cases: Iterable[Any] - invalid_cases: Iterable[Any] - - -def cases() -> Iterable[Case]: - # Gt, Ge, Lt, Le - yield Case(Annotated[int, at.Gt(4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[float, at.Gt(0.5)], (0.6, 0.7, 0.8, 0.9), (0.5, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Gt(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(date(2000, 1, 1))], - [date(2000, 1, 2), date(2000, 1, 3)], - [date(2000, 1, 1), date(1999, 12, 31)], - ) - yield Case( - Annotated[datetime, at.Gt(Decimal('1.123'))], - [Decimal('1.1231'), Decimal('123')], - [Decimal('1.123'), Decimal('0')], - ) - - yield Case(Annotated[int, at.Ge(4)], (4, 5, 6, 1000, 4), (0, -1)) - yield Case(Annotated[float, at.Ge(0.5)], (0.5, 0.6, 0.7, 0.8, 0.9), (0.4, 0.0, -0.1)) - yield Case( - Annotated[datetime, at.Ge(datetime(2000, 1, 1))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(1998, 1, 1), datetime(1999, 12, 31)], - ) - - yield Case(Annotated[int, at.Lt(4)], (0, -1), (4, 5, 6, 1000, 4)) - yield Case(Annotated[float, at.Lt(0.5)], (0.4, 0.0, -0.1), (0.5, 0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Lt(datetime(2000, 1, 1))], - [datetime(1999, 12, 31), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - yield Case(Annotated[int, at.Le(4)], (4, 0, -1), (5, 6, 1000)) - yield Case(Annotated[float, at.Le(0.5)], (0.5, 0.0, -0.1), (0.6, 0.7, 0.8, 0.9)) - yield Case( - Annotated[datetime, at.Le(datetime(2000, 1, 1))], - [datetime(2000, 1, 1), datetime(1999, 12, 31)], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - ) - - # Interval - yield Case(Annotated[int, at.Interval(gt=4)], (5, 6, 1000), (4, 0, -1)) - yield Case(Annotated[int, at.Interval(gt=4, lt=10)], (5, 6), (4, 10, 1000, 0, -1)) - yield Case(Annotated[float, at.Interval(ge=0.5, le=1)], (0.5, 0.9, 1), (0.49, 1.1)) - yield Case( - Annotated[datetime, at.Interval(gt=datetime(2000, 1, 1), le=datetime(2000, 1, 3))], - [datetime(2000, 1, 2), datetime(2000, 1, 3)], - [datetime(2000, 1, 1), datetime(2000, 1, 4)], - ) - - yield Case(Annotated[int, at.MultipleOf(multiple_of=3)], (0, 3, 9), (1, 2, 4)) - yield Case(Annotated[float, at.MultipleOf(multiple_of=0.5)], (0, 0.5, 1, 1.5), (0.4, 1.1)) - - # lengths - - yield Case(Annotated[str, at.MinLen(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[str, at.Len(3)], ('123', '1234', 'x' * 10), ('', '1', '12')) - yield Case(Annotated[List[int], at.MinLen(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - yield Case(Annotated[List[int], 
at.Len(3)], ([1, 2, 3], [1, 2, 3, 4], [1] * 10), ([], [1], [1, 2])) - - yield Case(Annotated[str, at.MaxLen(4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[str, at.Len(0, 4)], ('', '1234'), ('12345', 'x' * 10)) - yield Case(Annotated[List[str], at.MaxLen(4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - yield Case(Annotated[List[str], at.Len(0, 4)], ([], ['a', 'bcdef'], ['a', 'b', 'c']), (['a'] * 5, ['b'] * 10)) - - yield Case(Annotated[str, at.Len(3, 5)], ('123', '12345'), ('', '1', '12', '123456', 'x' * 10)) - yield Case(Annotated[str, at.Len(3, 3)], ('123',), ('12', '1234')) - - yield Case(Annotated[Dict[int, int], at.Len(2, 3)], [{1: 1, 2: 2}], [{}, {1: 1}, {1: 1, 2: 2, 3: 3, 4: 4}]) - yield Case(Annotated[Set[int], at.Len(2, 3)], ({1, 2}, {1, 2, 3}), (set(), {1}, {1, 2, 3, 4})) - yield Case(Annotated[Tuple[int, ...], at.Len(2, 3)], ((1, 2), (1, 2, 3)), ((), (1,), (1, 2, 3, 4))) - - # Timezone - - yield Case( - Annotated[datetime, at.Timezone(None)], [datetime(2000, 1, 1)], [datetime(2000, 1, 1, tzinfo=timezone.utc)] - ) - yield Case( - Annotated[datetime, at.Timezone(...)], [datetime(2000, 1, 1, tzinfo=timezone.utc)], [datetime(2000, 1, 1)] - ) - yield Case( - Annotated[datetime, at.Timezone(timezone.utc)], - [datetime(2000, 1, 1, tzinfo=timezone.utc)], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - yield Case( - Annotated[datetime, at.Timezone('Europe/London')], - [datetime(2000, 1, 1, tzinfo=timezone(timedelta(0), name='Europe/London'))], - [datetime(2000, 1, 1), datetime(2000, 1, 1, tzinfo=timezone(timedelta(hours=6)))], - ) - - # Quantity - - yield Case(Annotated[float, at.Unit(unit='m')], (5, 4.2), ('5m', '4.2m')) - - # predicate types - - yield Case(at.LowerCase[str], ['abc', 'foobar'], ['', 'A', 'Boom']) - yield Case(at.UpperCase[str], ['ABC', 'DEFO'], ['', 'a', 'abc', 'AbC']) - yield Case(at.IsDigit[str], ['123'], ['', 'ab', 'a1b2']) - yield Case(at.IsAscii[str], ['123', 'foo bar'], ['£100', '😊', 'whatever 👀']) - - yield Case(Annotated[int, at.Predicate(lambda x: x % 2 == 0)], [0, 2, 4], [1, 3, 5]) - - yield Case(at.IsFinite[float], [1.23], [math.nan, math.inf, -math.inf]) - yield Case(at.IsNotFinite[float], [math.nan, math.inf], [1.23]) - yield Case(at.IsNan[float], [math.nan], [1.23, math.inf]) - yield Case(at.IsNotNan[float], [1.23, math.inf], [math.nan]) - yield Case(at.IsInfinite[float], [math.inf], [math.nan, 1.23]) - yield Case(at.IsNotInfinite[float], [math.nan, 1.23], [math.inf]) - - # check stacked predicates - yield Case(at.IsInfinite[Annotated[float, at.Predicate(lambda x: x > 0)]], [math.inf], [-math.inf, 1.23, math.nan]) - - # doc - yield Case(Annotated[int, at.doc("A number")], [1, 2], []) - - # custom GroupedMetadata - class MyCustomGroupedMetadata(at.GroupedMetadata): - def __iter__(self) -> Iterator[at.Predicate]: - yield at.Predicate(lambda x: float(x).is_integer()) - - yield Case(Annotated[float, MyCustomGroupedMetadata()], [0, 2.0], [0.01, 1.5]) diff --git a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/INSTALLER b/myenv/Lib/site-packages/anyio-4.6.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/LICENSE b/myenv/Lib/site-packages/anyio-4.6.0.dist-info/LICENSE deleted file mode 100644 index 104eebf..0000000 --- a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -The MIT 
License (MIT) - -Copyright (c) 2018 Alex Grönholm - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/METADATA b/myenv/Lib/site-packages/anyio-4.6.0.dist-info/METADATA deleted file mode 100644 index 747e994..0000000 --- a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/METADATA +++ /dev/null @@ -1,104 +0,0 @@ -Metadata-Version: 2.1 -Name: anyio -Version: 4.6.0 -Summary: High level compatibility layer for multiple asynchronous event loop implementations -Author-email: Alex Grönholm -License: MIT -Project-URL: Documentation, https://anyio.readthedocs.io/en/latest/ -Project-URL: Changelog, https://anyio.readthedocs.io/en/stable/versionhistory.html -Project-URL: Source code, https://github.com/agronholm/anyio -Project-URL: Issue tracker, https://github.com/agronholm/anyio/issues -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Framework :: AnyIO -Classifier: Typing :: Typed -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Requires-Python: >=3.9 -Description-Content-Type: text/x-rst -License-File: LICENSE -Requires-Dist: idna >=2.8 -Requires-Dist: sniffio >=1.1 -Requires-Dist: exceptiongroup >=1.0.2 ; python_version < "3.11" -Requires-Dist: typing-extensions >=4.1 ; python_version < "3.11" -Provides-Extra: doc -Requires-Dist: packaging ; extra == 'doc' -Requires-Dist: Sphinx ~=7.4 ; extra == 'doc' -Requires-Dist: sphinx-rtd-theme ; extra == 'doc' -Requires-Dist: sphinx-autodoc-typehints >=1.2.0 ; extra == 'doc' -Provides-Extra: test -Requires-Dist: anyio[trio] ; extra == 'test' -Requires-Dist: coverage[toml] >=7 ; extra == 'test' -Requires-Dist: exceptiongroup >=1.2.0 ; extra == 'test' -Requires-Dist: hypothesis >=4.0 ; extra == 'test' -Requires-Dist: psutil >=5.9 ; extra == 'test' -Requires-Dist: pytest >=7.0 ; extra == 'test' -Requires-Dist: pytest-mock >=3.6.1 ; extra == 'test' -Requires-Dist: trustme ; extra == 'test' -Requires-Dist: uvloop >=0.21.0b1 ; (platform_python_implementation == "CPython" and platform_system != "Windows") and extra == 'test' -Provides-Extra: trio -Requires-Dist: trio >=0.26.1 ; extra == 'trio' - -.. 
image:: https://github.com/agronholm/anyio/actions/workflows/test.yml/badge.svg - :target: https://github.com/agronholm/anyio/actions/workflows/test.yml - :alt: Build Status -.. image:: https://coveralls.io/repos/github/agronholm/anyio/badge.svg?branch=master - :target: https://coveralls.io/github/agronholm/anyio?branch=master - :alt: Code Coverage -.. image:: https://readthedocs.org/projects/anyio/badge/?version=latest - :target: https://anyio.readthedocs.io/en/latest/?badge=latest - :alt: Documentation -.. image:: https://badges.gitter.im/gitterHQ/gitter.svg - :target: https://gitter.im/python-trio/AnyIO - :alt: Gitter chat - -AnyIO is an asynchronous networking and concurrency library that works on top of either asyncio_ or -trio_. It implements trio-like `structured concurrency`_ (SC) on top of asyncio and works in harmony -with the native SC of trio itself. - -Applications and libraries written against AnyIO's API will run unmodified on either asyncio_ or -trio_. AnyIO can also be adopted into a library or application incrementally – bit by bit, no full -refactoring necessary. It will blend in with the native libraries of your chosen backend. - -Documentation -------------- - -View full documentation at: https://anyio.readthedocs.io/ - -Features --------- - -AnyIO offers the following functionality: - -* Task groups (nurseries_ in trio terminology) -* High-level networking (TCP, UDP and UNIX sockets) - - * `Happy eyeballs`_ algorithm for TCP connections (more robust than that of asyncio on Python - 3.8) - * async/await style UDP sockets (unlike asyncio where you still have to use Transports and - Protocols) - -* A versatile API for byte streams and object streams -* Inter-task synchronization and communication (locks, conditions, events, semaphores, object - streams) -* Worker threads -* Subprocesses -* Asynchronous file I/O (using worker threads) -* Signal handling - -AnyIO also comes with its own pytest_ plugin which also supports asynchronous fixtures. -It even works with the popular Hypothesis_ library. - -.. _asyncio: https://docs.python.org/3/library/asyncio.html -.. _trio: https://github.com/python-trio/trio -.. _structured concurrency: https://en.wikipedia.org/wiki/Structured_concurrency -.. _nurseries: https://trio.readthedocs.io/en/stable/reference-core.html#nurseries-and-spawning -.. _Happy eyeballs: https://en.wikipedia.org/wiki/Happy_Eyeballs -.. _pytest: https://docs.pytest.org/en/latest/ -.. 
_Hypothesis: https://hypothesis.works/ diff --git a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/RECORD b/myenv/Lib/site-packages/anyio-4.6.0.dist-info/RECORD deleted file mode 100644 index cd96a3c..0000000 --- a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/RECORD +++ /dev/null @@ -1,82 +0,0 @@ -anyio-4.6.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -anyio-4.6.0.dist-info/LICENSE,sha256=U2GsncWPLvX9LpsJxoKXwX8ElQkJu8gCO9uC6s8iwrA,1081 -anyio-4.6.0.dist-info/METADATA,sha256=2SuML65RDJNttz0javlp7HkyrfwOLtCHV3634Gvs_-Q,4608 -anyio-4.6.0.dist-info/RECORD,, -anyio-4.6.0.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91 -anyio-4.6.0.dist-info/entry_points.txt,sha256=_d6Yu6uiaZmNe0CydowirE9Cmg7zUL2g08tQpoS3Qvc,39 -anyio-4.6.0.dist-info/top_level.txt,sha256=QglSMiWX8_5dpoVAEIHdEYzvqFMdSYWmCj6tYw2ITkQ,6 -anyio/__init__.py,sha256=myTIdg75VPwA-9L7BpislRQplJUPMeleUBHa4MyIruw,4315 -anyio/__pycache__/__init__.cpython-312.pyc,, -anyio/__pycache__/from_thread.cpython-312.pyc,, -anyio/__pycache__/lowlevel.cpython-312.pyc,, -anyio/__pycache__/pytest_plugin.cpython-312.pyc,, -anyio/__pycache__/to_process.cpython-312.pyc,, -anyio/__pycache__/to_thread.cpython-312.pyc,, -anyio/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/_backends/__pycache__/__init__.cpython-312.pyc,, -anyio/_backends/__pycache__/_asyncio.cpython-312.pyc,, -anyio/_backends/__pycache__/_trio.cpython-312.pyc,, -anyio/_backends/_asyncio.py,sha256=ovOKhv3TLJ2RS1CW3y_RvLmp1-BbVvOR5sGRqAmegE4,90260 -anyio/_backends/_trio.py,sha256=S031md5CLTaTnKgKx2TTGoU1Hzv66v89byOhdVvGiT4,39782 -anyio/_core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/_core/__pycache__/__init__.cpython-312.pyc,, -anyio/_core/__pycache__/_eventloop.cpython-312.pyc,, -anyio/_core/__pycache__/_exceptions.cpython-312.pyc,, -anyio/_core/__pycache__/_fileio.cpython-312.pyc,, -anyio/_core/__pycache__/_resources.cpython-312.pyc,, -anyio/_core/__pycache__/_signals.cpython-312.pyc,, -anyio/_core/__pycache__/_sockets.cpython-312.pyc,, -anyio/_core/__pycache__/_streams.cpython-312.pyc,, -anyio/_core/__pycache__/_subprocesses.cpython-312.pyc,, -anyio/_core/__pycache__/_synchronization.cpython-312.pyc,, -anyio/_core/__pycache__/_tasks.cpython-312.pyc,, -anyio/_core/__pycache__/_testing.cpython-312.pyc,, -anyio/_core/__pycache__/_typedattr.cpython-312.pyc,, -anyio/_core/_eventloop.py,sha256=t_tAwBFPjF8jrZGjlJ6bbYy6KA3bjsbZxV9mvh9t1i0,4695 -anyio/_core/_exceptions.py,sha256=NPxECdXkG4nk3NOCUeFmBEAgPhmj7Bzs4vFAKaW_vqw,2481 -anyio/_core/_fileio.py,sha256=EKbLmYEZ3kcRNg3hoSpe65nn8zCMzszCYDvE3ZZQSwQ,20063 -anyio/_core/_resources.py,sha256=NbmU5O5UX3xEyACnkmYX28Fmwdl-f-ny0tHym26e0w0,435 -anyio/_core/_signals.py,sha256=vulT1M1xdLYtAR-eY5TamIgaf1WTlOwOrMGwswlTTr8,905 -anyio/_core/_sockets.py,sha256=iM3UeMU68n0PlQjl2U9HyiOpV26rnjqV4KBr_Fo2z1I,24293 -anyio/_core/_streams.py,sha256=OnaKgoDD-FcMSwLvkoAUGP51sG2ZdRvMpxt9q2w1gYA,1804 -anyio/_core/_subprocesses.py,sha256=WquR6sHrnaZofaeqnL8U4Yv___msVW_WqivleLHK4zI,7760 -anyio/_core/_synchronization.py,sha256=UDsbG5f8jWsWkRxYUOKp_WOBWCI9-vBO6wBrsR6WNjA,20121 -anyio/_core/_tasks.py,sha256=pvVEX2Fw159sf0ypAPerukKsZgRRwvFFedVW52nR2Vk,4764 -anyio/_core/_testing.py,sha256=YUGwA5cgFFbUTv4WFd7cv_BSVr4ryTtPp8owQA3JdWE,2118 -anyio/_core/_typedattr.py,sha256=P4ozZikn3-DbpoYcvyghS_FOYAgbmUxeoU8-L_07pZM,2508 -anyio/abc/__init__.py,sha256=U44_s3BglL8BojWQiq0KuokvCqkunIp-ySH3GyRXxAc,2681 -anyio/abc/__pycache__/__init__.cpython-312.pyc,, 
-anyio/abc/__pycache__/_eventloop.cpython-312.pyc,, -anyio/abc/__pycache__/_resources.cpython-312.pyc,, -anyio/abc/__pycache__/_sockets.cpython-312.pyc,, -anyio/abc/__pycache__/_streams.cpython-312.pyc,, -anyio/abc/__pycache__/_subprocesses.cpython-312.pyc,, -anyio/abc/__pycache__/_tasks.cpython-312.pyc,, -anyio/abc/__pycache__/_testing.cpython-312.pyc,, -anyio/abc/_eventloop.py,sha256=kdkLSnizMk3tPq61K109iPUQ6uXpvp1uNsj5aP1s0N8,9619 -anyio/abc/_resources.py,sha256=DrYvkNN1hH6Uvv5_5uKySvDsnknGVDe8FCKfko0VtN8,783 -anyio/abc/_sockets.py,sha256=KhWtJxan8jpBXKwPaFeQzI4iRXdFaOIn0HXtDZnaO7U,6262 -anyio/abc/_streams.py,sha256=GzST5Q2zQmxVzdrAqtbSyHNxkPlIC9AzeZJg_YyPAXw,6598 -anyio/abc/_subprocesses.py,sha256=cumAPJTktOQtw63IqG0lDpyZqu_l1EElvQHMiwJgL08,2067 -anyio/abc/_tasks.py,sha256=0Jc6oIwUjMIVReehF6knOZyAqlgwDt4TP1NQkx4IQGw,2731 -anyio/abc/_testing.py,sha256=tBJUzkSfOXJw23fe8qSJ03kJlShOYjjaEyFB6k6MYT8,1821 -anyio/from_thread.py,sha256=dbi5TUH45_Sg_jZ8Vv1NJWVohe0WeQ_OaCvXIKveAGg,17478 -anyio/lowlevel.py,sha256=nkgmW--SdxGVp0cmLUYazjkigveRm5HY7-gW8Bpp9oY,4169 -anyio/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/pytest_plugin.py,sha256=aBx2mk596KgDwnsICvhBfeGWFPKY9xVv0hrjndHFMXg,5839 -anyio/streams/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -anyio/streams/__pycache__/__init__.cpython-312.pyc,, -anyio/streams/__pycache__/buffered.cpython-312.pyc,, -anyio/streams/__pycache__/file.cpython-312.pyc,, -anyio/streams/__pycache__/memory.cpython-312.pyc,, -anyio/streams/__pycache__/stapled.cpython-312.pyc,, -anyio/streams/__pycache__/text.cpython-312.pyc,, -anyio/streams/__pycache__/tls.cpython-312.pyc,, -anyio/streams/buffered.py,sha256=UCldKC168YuLvT7n3HtNPnQ2iWAMSTYQWbZvzLwMwkM,4500 -anyio/streams/file.py,sha256=6uoTNb5KbMoj-6gS3_xrrL8uZN8Q4iIvOS1WtGyFfKw,4383 -anyio/streams/memory.py,sha256=j8AyOExK4-UPaon_Xbhwax25Vqs0DwFg3ZXc-EIiHjY,10550 -anyio/streams/stapled.py,sha256=U09pCrmOw9kkNhe6tKopsm1QIMT1lFTFvtb-A7SIe4k,4302 -anyio/streams/text.py,sha256=6x8w8xlfCZKTUWQoJiMPoMhSSJFUBRKgoBNSBtbd9yg,5094 -anyio/streams/tls.py,sha256=dvjnmGPxHlgkK9azi_mydLd2GbRqAvuGhcjEZGiPC6M,12745 -anyio/to_process.py,sha256=cR4n7TssbbJowE_9cWme49zaeuoBuMzqgZ6cBIs0YIs,9571 -anyio/to_thread.py,sha256=WM2JQ2MbVsd5D5CM08bQiTwzZIvpsGjfH1Fy247KoDQ,2396 diff --git a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/WHEEL b/myenv/Lib/site-packages/anyio-4.6.0.dist-info/WHEEL deleted file mode 100644 index dcfdc6e..0000000 --- a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.1.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/entry_points.txt b/myenv/Lib/site-packages/anyio-4.6.0.dist-info/entry_points.txt deleted file mode 100644 index 44dd9bd..0000000 --- a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[pytest11] -anyio = anyio.pytest_plugin diff --git a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/top_level.txt b/myenv/Lib/site-packages/anyio-4.6.0.dist-info/top_level.txt deleted file mode 100644 index c77c069..0000000 --- a/myenv/Lib/site-packages/anyio-4.6.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -anyio diff --git a/myenv/Lib/site-packages/anyio/__init__.py b/myenv/Lib/site-packages/anyio/__init__.py deleted file mode 100644 index fd9fe06..0000000 --- a/myenv/Lib/site-packages/anyio/__init__.py +++ /dev/null @@ -1,74 +0,0 @@ -from __future__ import annotations - -from ._core._eventloop 
import current_time as current_time -from ._core._eventloop import get_all_backends as get_all_backends -from ._core._eventloop import get_cancelled_exc_class as get_cancelled_exc_class -from ._core._eventloop import run as run -from ._core._eventloop import sleep as sleep -from ._core._eventloop import sleep_forever as sleep_forever -from ._core._eventloop import sleep_until as sleep_until -from ._core._exceptions import BrokenResourceError as BrokenResourceError -from ._core._exceptions import BrokenWorkerProcess as BrokenWorkerProcess -from ._core._exceptions import BusyResourceError as BusyResourceError -from ._core._exceptions import ClosedResourceError as ClosedResourceError -from ._core._exceptions import DelimiterNotFound as DelimiterNotFound -from ._core._exceptions import EndOfStream as EndOfStream -from ._core._exceptions import IncompleteRead as IncompleteRead -from ._core._exceptions import TypedAttributeLookupError as TypedAttributeLookupError -from ._core._exceptions import WouldBlock as WouldBlock -from ._core._fileio import AsyncFile as AsyncFile -from ._core._fileio import Path as Path -from ._core._fileio import open_file as open_file -from ._core._fileio import wrap_file as wrap_file -from ._core._resources import aclose_forcefully as aclose_forcefully -from ._core._signals import open_signal_receiver as open_signal_receiver -from ._core._sockets import connect_tcp as connect_tcp -from ._core._sockets import connect_unix as connect_unix -from ._core._sockets import create_connected_udp_socket as create_connected_udp_socket -from ._core._sockets import ( - create_connected_unix_datagram_socket as create_connected_unix_datagram_socket, -) -from ._core._sockets import create_tcp_listener as create_tcp_listener -from ._core._sockets import create_udp_socket as create_udp_socket -from ._core._sockets import create_unix_datagram_socket as create_unix_datagram_socket -from ._core._sockets import create_unix_listener as create_unix_listener -from ._core._sockets import getaddrinfo as getaddrinfo -from ._core._sockets import getnameinfo as getnameinfo -from ._core._sockets import wait_socket_readable as wait_socket_readable -from ._core._sockets import wait_socket_writable as wait_socket_writable -from ._core._streams import create_memory_object_stream as create_memory_object_stream -from ._core._subprocesses import open_process as open_process -from ._core._subprocesses import run_process as run_process -from ._core._synchronization import CapacityLimiter as CapacityLimiter -from ._core._synchronization import ( - CapacityLimiterStatistics as CapacityLimiterStatistics, -) -from ._core._synchronization import Condition as Condition -from ._core._synchronization import ConditionStatistics as ConditionStatistics -from ._core._synchronization import Event as Event -from ._core._synchronization import EventStatistics as EventStatistics -from ._core._synchronization import Lock as Lock -from ._core._synchronization import LockStatistics as LockStatistics -from ._core._synchronization import ResourceGuard as ResourceGuard -from ._core._synchronization import Semaphore as Semaphore -from ._core._synchronization import SemaphoreStatistics as SemaphoreStatistics -from ._core._tasks import TASK_STATUS_IGNORED as TASK_STATUS_IGNORED -from ._core._tasks import CancelScope as CancelScope -from ._core._tasks import create_task_group as create_task_group -from ._core._tasks import current_effective_deadline as current_effective_deadline -from ._core._tasks import fail_after as fail_after -from 
._core._tasks import move_on_after as move_on_after -from ._core._testing import TaskInfo as TaskInfo -from ._core._testing import get_current_task as get_current_task -from ._core._testing import get_running_tasks as get_running_tasks -from ._core._testing import wait_all_tasks_blocked as wait_all_tasks_blocked -from ._core._typedattr import TypedAttributeProvider as TypedAttributeProvider -from ._core._typedattr import TypedAttributeSet as TypedAttributeSet -from ._core._typedattr import typed_attribute as typed_attribute - -# Re-export imports so they look like they live directly in this package -for __value in list(locals().values()): - if getattr(__value, "__module__", "").startswith("anyio."): - __value.__module__ = __name__ - -del __value diff --git a/myenv/Lib/site-packages/anyio/_backends/__init__.py b/myenv/Lib/site-packages/anyio/_backends/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/anyio/_backends/_asyncio.py b/myenv/Lib/site-packages/anyio/_backends/_asyncio.py deleted file mode 100644 index 9342fab..0000000 --- a/myenv/Lib/site-packages/anyio/_backends/_asyncio.py +++ /dev/null @@ -1,2735 +0,0 @@ -from __future__ import annotations - -import array -import asyncio -import concurrent.futures -import math -import os -import socket -import sys -import threading -import weakref -from asyncio import ( - AbstractEventLoop, - CancelledError, - all_tasks, - create_task, - current_task, - get_running_loop, - sleep, -) -from asyncio.base_events import _run_until_complete_cb # type: ignore[attr-defined] -from collections import OrderedDict, deque -from collections.abc import ( - AsyncGenerator, - AsyncIterator, - Awaitable, - Callable, - Collection, - Coroutine, - Iterable, - Sequence, -) -from concurrent.futures import Future -from contextlib import AbstractContextManager, suppress -from contextvars import Context, copy_context -from dataclasses import dataclass -from functools import partial, wraps -from inspect import ( - CORO_RUNNING, - CORO_SUSPENDED, - getcoroutinestate, - iscoroutine, -) -from io import IOBase -from os import PathLike -from queue import Queue -from signal import Signals -from socket import AddressFamily, SocketKind -from threading import Thread -from types import TracebackType -from typing import ( - IO, - Any, - Optional, - TypeVar, - cast, -) -from weakref import WeakKeyDictionary - -import sniffio - -from .. 
import ( - CapacityLimiterStatistics, - EventStatistics, - LockStatistics, - TaskInfo, - abc, -) -from .._core._eventloop import claim_worker_thread, threadlocals -from .._core._exceptions import ( - BrokenResourceError, - BusyResourceError, - ClosedResourceError, - EndOfStream, - WouldBlock, - iterate_exceptions, -) -from .._core._sockets import convert_ipv6_sockaddr -from .._core._streams import create_memory_object_stream -from .._core._synchronization import ( - CapacityLimiter as BaseCapacityLimiter, -) -from .._core._synchronization import Event as BaseEvent -from .._core._synchronization import Lock as BaseLock -from .._core._synchronization import ( - ResourceGuard, - SemaphoreStatistics, -) -from .._core._synchronization import Semaphore as BaseSemaphore -from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import ( - AsyncBackend, - IPSockAddrType, - SocketListener, - UDPPacketType, - UNIXDatagramPacketType, -) -from ..abc._eventloop import StrOrBytesPath -from ..lowlevel import RunVar -from ..streams.memory import MemoryObjectReceiveStream, MemoryObjectSendStream - -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - -if sys.version_info >= (3, 11): - from asyncio import Runner - from typing import TypeVarTuple, Unpack -else: - import contextvars - import enum - import signal - from asyncio import coroutines, events, exceptions, tasks - - from exceptiongroup import BaseExceptionGroup - from typing_extensions import TypeVarTuple, Unpack - - class _State(enum.Enum): - CREATED = "created" - INITIALIZED = "initialized" - CLOSED = "closed" - - class Runner: - # Copied from CPython 3.11 - def __init__( - self, - *, - debug: bool | None = None, - loop_factory: Callable[[], AbstractEventLoop] | None = None, - ): - self._state = _State.CREATED - self._debug = debug - self._loop_factory = loop_factory - self._loop: AbstractEventLoop | None = None - self._context = None - self._interrupt_count = 0 - self._set_event_loop = False - - def __enter__(self) -> Runner: - self._lazy_init() - return self - - def __exit__( - self, - exc_type: type[BaseException], - exc_val: BaseException, - exc_tb: TracebackType, - ) -> None: - self.close() - - def close(self) -> None: - """Shutdown and close event loop.""" - if self._state is not _State.INITIALIZED: - return - try: - loop = self._loop - _cancel_all_tasks(loop) - loop.run_until_complete(loop.shutdown_asyncgens()) - if hasattr(loop, "shutdown_default_executor"): - loop.run_until_complete(loop.shutdown_default_executor()) - else: - loop.run_until_complete(_shutdown_default_executor(loop)) - finally: - if self._set_event_loop: - events.set_event_loop(None) - loop.close() - self._loop = None - self._state = _State.CLOSED - - def get_loop(self) -> AbstractEventLoop: - """Return embedded event loop.""" - self._lazy_init() - return self._loop - - def run(self, coro: Coroutine[T_Retval], *, context=None) -> T_Retval: - """Run a coroutine inside the embedded event loop.""" - if not coroutines.iscoroutine(coro): - raise ValueError(f"a coroutine was expected, got {coro!r}") - - if events._get_running_loop() is not None: - # fail fast with short traceback - raise RuntimeError( - "Runner.run() cannot be called from a running event loop" - ) - - self._lazy_init() - - if context is None: - context = self._context - task = context.run(self._loop.create_task, coro) - - if ( - threading.current_thread() is threading.main_thread() - and signal.getsignal(signal.SIGINT) is 
signal.default_int_handler - ): - sigint_handler = partial(self._on_sigint, main_task=task) - try: - signal.signal(signal.SIGINT, sigint_handler) - except ValueError: - # `signal.signal` may throw if `threading.main_thread` does - # not support signals (e.g. embedded interpreter with signals - # not registered - see gh-91880) - sigint_handler = None - else: - sigint_handler = None - - self._interrupt_count = 0 - try: - return self._loop.run_until_complete(task) - except exceptions.CancelledError: - if self._interrupt_count > 0: - uncancel = getattr(task, "uncancel", None) - if uncancel is not None and uncancel() == 0: - raise KeyboardInterrupt() - raise # CancelledError - finally: - if ( - sigint_handler is not None - and signal.getsignal(signal.SIGINT) is sigint_handler - ): - signal.signal(signal.SIGINT, signal.default_int_handler) - - def _lazy_init(self) -> None: - if self._state is _State.CLOSED: - raise RuntimeError("Runner is closed") - if self._state is _State.INITIALIZED: - return - if self._loop_factory is None: - self._loop = events.new_event_loop() - if not self._set_event_loop: - # Call set_event_loop only once to avoid calling - # attach_loop multiple times on child watchers - events.set_event_loop(self._loop) - self._set_event_loop = True - else: - self._loop = self._loop_factory() - if self._debug is not None: - self._loop.set_debug(self._debug) - self._context = contextvars.copy_context() - self._state = _State.INITIALIZED - - def _on_sigint(self, signum, frame, main_task: asyncio.Task) -> None: - self._interrupt_count += 1 - if self._interrupt_count == 1 and not main_task.done(): - main_task.cancel() - # wakeup loop if it is blocked by select() with long timeout - self._loop.call_soon_threadsafe(lambda: None) - return - raise KeyboardInterrupt() - - def _cancel_all_tasks(loop: AbstractEventLoop) -> None: - to_cancel = tasks.all_tasks(loop) - if not to_cancel: - return - - for task in to_cancel: - task.cancel() - - loop.run_until_complete(tasks.gather(*to_cancel, return_exceptions=True)) - - for task in to_cancel: - if task.cancelled(): - continue - if task.exception() is not None: - loop.call_exception_handler( - { - "message": "unhandled exception during asyncio.run() shutdown", - "exception": task.exception(), - "task": task, - } - ) - - async def _shutdown_default_executor(loop: AbstractEventLoop) -> None: - """Schedule the shutdown of the default executor.""" - - def _do_shutdown(future: asyncio.futures.Future) -> None: - try: - loop._default_executor.shutdown(wait=True) # type: ignore[attr-defined] - loop.call_soon_threadsafe(future.set_result, None) - except Exception as ex: - loop.call_soon_threadsafe(future.set_exception, ex) - - loop._executor_shutdown_called = True - if loop._default_executor is None: - return - future = loop.create_future() - thread = threading.Thread(target=_do_shutdown, args=(future,)) - thread.start() - try: - await future - finally: - thread.join() - - -T_Retval = TypeVar("T_Retval") -T_contra = TypeVar("T_contra", contravariant=True) -PosArgsT = TypeVarTuple("PosArgsT") -P = ParamSpec("P") - -_root_task: RunVar[asyncio.Task | None] = RunVar("_root_task") - - -def find_root_task() -> asyncio.Task: - root_task = _root_task.get(None) - if root_task is not None and not root_task.done(): - return root_task - - # Look for a task that has been started via run_until_complete() - for task in all_tasks(): - if task._callbacks and not task.done(): - callbacks = [cb for cb, context in task._callbacks] - for cb in callbacks: - if ( - cb is 
_run_until_complete_cb - or getattr(cb, "__module__", None) == "uvloop.loop" - ): - _root_task.set(task) - return task - - # Look up the topmost task in the AnyIO task tree, if possible - task = cast(asyncio.Task, current_task()) - state = _task_states.get(task) - if state: - cancel_scope = state.cancel_scope - while cancel_scope and cancel_scope._parent_scope is not None: - cancel_scope = cancel_scope._parent_scope - - if cancel_scope is not None: - return cast(asyncio.Task, cancel_scope._host_task) - - return task - - -def get_callable_name(func: Callable) -> str: - module = getattr(func, "__module__", None) - qualname = getattr(func, "__qualname__", None) - return ".".join([x for x in (module, qualname) if x]) - - -# -# Event loop -# - -_run_vars: WeakKeyDictionary[asyncio.AbstractEventLoop, Any] = WeakKeyDictionary() - - -def _task_started(task: asyncio.Task) -> bool: - """Return ``True`` if the task has been started and has not finished.""" - try: - return getcoroutinestate(task.get_coro()) in (CORO_RUNNING, CORO_SUSPENDED) - except AttributeError: - # task coro is async_genenerator_asend https://bugs.python.org/issue37771 - raise Exception(f"Cannot determine if task {task} has started or not") from None - - -# -# Timeouts and cancellation -# - - -def is_anyio_cancellation(exc: CancelledError) -> bool: - return ( - bool(exc.args) - and isinstance(exc.args[0], str) - and exc.args[0].startswith("Cancelled by cancel scope ") - ) - - -class CancelScope(BaseCancelScope): - def __new__( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return object.__new__(cls) - - def __init__(self, deadline: float = math.inf, shield: bool = False): - self._deadline = deadline - self._shield = shield - self._parent_scope: CancelScope | None = None - self._child_scopes: set[CancelScope] = set() - self._cancel_called = False - self._cancelled_caught = False - self._active = False - self._timeout_handle: asyncio.TimerHandle | None = None - self._cancel_handle: asyncio.Handle | None = None - self._tasks: set[asyncio.Task] = set() - self._host_task: asyncio.Task | None = None - self._cancel_calls: int = 0 - self._cancelling: int | None = None - - def __enter__(self) -> CancelScope: - if self._active: - raise RuntimeError( - "Each CancelScope may only be used for a single 'with' block" - ) - - self._host_task = host_task = cast(asyncio.Task, current_task()) - self._tasks.add(host_task) - try: - task_state = _task_states[host_task] - except KeyError: - task_state = TaskState(None, self) - _task_states[host_task] = task_state - else: - self._parent_scope = task_state.cancel_scope - task_state.cancel_scope = self - if self._parent_scope is not None: - self._parent_scope._child_scopes.add(self) - self._parent_scope._tasks.remove(host_task) - - self._timeout() - self._active = True - if sys.version_info >= (3, 11): - self._cancelling = self._host_task.cancelling() - - # Start cancelling the host task if the scope was cancelled before entering - if self._cancel_called: - self._deliver_cancellation(self) - - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - if not self._active: - raise RuntimeError("This cancel scope is not active") - if current_task() is not self._host_task: - raise RuntimeError( - "Attempted to exit cancel scope in a different task than it was " - "entered in" - ) - - assert self._host_task is not None - host_task_state = _task_states.get(self._host_task) 
- if host_task_state is None or host_task_state.cancel_scope is not self: - raise RuntimeError( - "Attempted to exit a cancel scope that isn't the current tasks's " - "current cancel scope" - ) - - self._active = False - if self._timeout_handle: - self._timeout_handle.cancel() - self._timeout_handle = None - - self._tasks.remove(self._host_task) - if self._parent_scope is not None: - self._parent_scope._child_scopes.remove(self) - self._parent_scope._tasks.add(self._host_task) - - host_task_state.cancel_scope = self._parent_scope - - # Undo all cancellations done by this scope - if self._cancelling is not None: - while self._cancel_calls: - self._cancel_calls -= 1 - if self._host_task.uncancel() <= self._cancelling: - break - - # We only swallow the exception iff it was an AnyIO CancelledError, either - # directly as exc_val or inside an exception group and there are no cancelled - # parent cancel scopes visible to us here - not_swallowed_exceptions = 0 - swallow_exception = False - if exc_val is not None: - for exc in iterate_exceptions(exc_val): - if self._cancel_called and isinstance(exc, CancelledError): - if not (swallow_exception := self._uncancel(exc)): - not_swallowed_exceptions += 1 - else: - not_swallowed_exceptions += 1 - - # Restart the cancellation effort in the closest visible, cancelled parent - # scope if necessary - self._restart_cancellation_in_parent() - return swallow_exception and not not_swallowed_exceptions - - @property - def _effectively_cancelled(self) -> bool: - cancel_scope: CancelScope | None = self - while cancel_scope is not None: - if cancel_scope._cancel_called: - return True - - if cancel_scope.shield: - return False - - cancel_scope = cancel_scope._parent_scope - - return False - - @property - def _parent_cancellation_is_visible_to_us(self) -> bool: - return ( - self._parent_scope is not None - and not self.shield - and self._parent_scope._effectively_cancelled - ) - - def _uncancel(self, cancelled_exc: CancelledError) -> bool: - if self._host_task is None: - self._cancel_calls = 0 - return True - - while True: - if is_anyio_cancellation(cancelled_exc): - # Only swallow the cancellation exception if it's an AnyIO cancel - # exception and there are no other cancel scopes down the line pending - # cancellation - self._cancelled_caught = ( - self._effectively_cancelled - and not self._parent_cancellation_is_visible_to_us - ) - return self._cancelled_caught - - # Sometimes third party frameworks catch a CancelledError and raise a new - # one, so as a workaround we have to look at the previous ones in - # __context__ too for a matching cancel message - if isinstance(cancelled_exc.__context__, CancelledError): - cancelled_exc = cancelled_exc.__context__ - continue - - return False - - def _timeout(self) -> None: - if self._deadline != math.inf: - loop = get_running_loop() - if loop.time() >= self._deadline: - self.cancel() - else: - self._timeout_handle = loop.call_at(self._deadline, self._timeout) - - def _deliver_cancellation(self, origin: CancelScope) -> bool: - """ - Deliver cancellation to directly contained tasks and nested cancel scopes. - - Schedule another run at the end if we still have tasks eligible for - cancellation. 
- - :param origin: the cancel scope that originated the cancellation - :return: ``True`` if the delivery needs to be retried on the next cycle - - """ - should_retry = False - current = current_task() - for task in self._tasks: - should_retry = True - if task._must_cancel: # type: ignore[attr-defined] - continue - - # The task is eligible for cancellation if it has started - if task is not current and (task is self._host_task or _task_started(task)): - waiter = task._fut_waiter # type: ignore[attr-defined] - if not isinstance(waiter, asyncio.Future) or not waiter.done(): - task.cancel(f"Cancelled by cancel scope {id(origin):x}") - if task is origin._host_task: - origin._cancel_calls += 1 - - # Deliver cancellation to child scopes that aren't shielded or running their own - # cancellation callbacks - for scope in self._child_scopes: - if not scope._shield and not scope.cancel_called: - should_retry = scope._deliver_cancellation(origin) or should_retry - - # Schedule another callback if there are still tasks left - if origin is self: - if should_retry: - self._cancel_handle = get_running_loop().call_soon( - self._deliver_cancellation, origin - ) - else: - self._cancel_handle = None - - return should_retry - - def _restart_cancellation_in_parent(self) -> None: - """ - Restart the cancellation effort in the closest directly cancelled parent scope. - - """ - scope = self._parent_scope - while scope is not None: - if scope._cancel_called: - if scope._cancel_handle is None: - scope._deliver_cancellation(scope) - - break - - # No point in looking beyond any shielded scope - if scope._shield: - break - - scope = scope._parent_scope - - def cancel(self) -> None: - if not self._cancel_called: - if self._timeout_handle: - self._timeout_handle.cancel() - self._timeout_handle = None - - self._cancel_called = True - if self._host_task is not None: - self._deliver_cancellation(self) - - @property - def deadline(self) -> float: - return self._deadline - - @deadline.setter - def deadline(self, value: float) -> None: - self._deadline = float(value) - if self._timeout_handle is not None: - self._timeout_handle.cancel() - self._timeout_handle = None - - if self._active and not self._cancel_called: - self._timeout() - - @property - def cancel_called(self) -> bool: - return self._cancel_called - - @property - def cancelled_caught(self) -> bool: - return self._cancelled_caught - - @property - def shield(self) -> bool: - return self._shield - - @shield.setter - def shield(self, value: bool) -> None: - if self._shield != value: - self._shield = value - if not value: - self._restart_cancellation_in_parent() - - -# -# Task states -# - - -class TaskState: - """ - Encapsulates auxiliary task information that cannot be added to the Task instance - itself because there are no guarantees about its implementation. 
- """ - - __slots__ = "parent_id", "cancel_scope", "__weakref__" - - def __init__(self, parent_id: int | None, cancel_scope: CancelScope | None): - self.parent_id = parent_id - self.cancel_scope = cancel_scope - - -_task_states: WeakKeyDictionary[asyncio.Task, TaskState] = WeakKeyDictionary() - - -# -# Task groups -# - - -class _AsyncioTaskStatus(abc.TaskStatus): - def __init__(self, future: asyncio.Future, parent_id: int): - self._future = future - self._parent_id = parent_id - - def started(self, value: T_contra | None = None) -> None: - try: - self._future.set_result(value) - except asyncio.InvalidStateError: - if not self._future.cancelled(): - raise RuntimeError( - "called 'started' twice on the same task status" - ) from None - - task = cast(asyncio.Task, current_task()) - _task_states[task].parent_id = self._parent_id - - -class TaskGroup(abc.TaskGroup): - def __init__(self) -> None: - self.cancel_scope: CancelScope = CancelScope() - self._active = False - self._exceptions: list[BaseException] = [] - self._tasks: set[asyncio.Task] = set() - - async def __aenter__(self) -> TaskGroup: - self.cancel_scope.__enter__() - self._active = True - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - if exc_val is not None: - self.cancel_scope.cancel() - if not isinstance(exc_val, CancelledError): - self._exceptions.append(exc_val) - - try: - if self._tasks: - with CancelScope() as wait_scope: - while self._tasks: - try: - await asyncio.wait(self._tasks) - except CancelledError as exc: - # Shield the scope against further cancellation attempts, - # as they're not productive (#695) - wait_scope.shield = True - self.cancel_scope.cancel() - - # Set exc_val from the cancellation exception if it was - # previously unset. However, we should not replace a native - # cancellation exception with one raise by a cancel scope. 
- if exc_val is None or ( - isinstance(exc_val, CancelledError) - and not is_anyio_cancellation(exc) - ): - exc_val = exc - else: - # If there are no child tasks to wait on, run at least one checkpoint - # anyway - await AsyncIOBackend.cancel_shielded_checkpoint() - - self._active = False - if self._exceptions: - raise BaseExceptionGroup( - "unhandled errors in a TaskGroup", self._exceptions - ) - elif exc_val: - raise exc_val - except BaseException as exc: - if self.cancel_scope.__exit__(type(exc), exc, exc.__traceback__): - return True - - raise - - return self.cancel_scope.__exit__(exc_type, exc_val, exc_tb) - - def _spawn( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - args: tuple[Unpack[PosArgsT]], - name: object, - task_status_future: asyncio.Future | None = None, - ) -> asyncio.Task: - def task_done(_task: asyncio.Task) -> None: - task_state = _task_states[_task] - assert task_state.cancel_scope is not None - assert _task in task_state.cancel_scope._tasks - task_state.cancel_scope._tasks.remove(_task) - self._tasks.remove(task) - del _task_states[_task] - - try: - exc = _task.exception() - except CancelledError as e: - while isinstance(e.__context__, CancelledError): - e = e.__context__ - - exc = e - - if exc is not None: - # The future can only be in the cancelled state if the host task was - # cancelled, so return immediately instead of adding one more - # CancelledError to the exceptions list - if task_status_future is not None and task_status_future.cancelled(): - return - - if task_status_future is None or task_status_future.done(): - if not isinstance(exc, CancelledError): - self._exceptions.append(exc) - - if not self.cancel_scope._effectively_cancelled: - self.cancel_scope.cancel() - else: - task_status_future.set_exception(exc) - elif task_status_future is not None and not task_status_future.done(): - task_status_future.set_exception( - RuntimeError("Child exited without calling task_status.started()") - ) - - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." - ) - - kwargs = {} - if task_status_future: - parent_id = id(current_task()) - kwargs["task_status"] = _AsyncioTaskStatus( - task_status_future, id(self.cancel_scope._host_task) - ) - else: - parent_id = id(self.cancel_scope._host_task) - - coro = func(*args, **kwargs) - if not iscoroutine(coro): - prefix = f"{func.__module__}." 
if hasattr(func, "__module__") else "" - raise TypeError( - f"Expected {prefix}{func.__qualname__}() to return a coroutine, but " - f"the return value ({coro!r}) is not a coroutine object" - ) - - name = get_callable_name(func) if name is None else str(name) - task = create_task(coro, name=name) - task.add_done_callback(task_done) - - # Make the spawned task inherit the task group's cancel scope - _task_states[task] = TaskState( - parent_id=parent_id, cancel_scope=self.cancel_scope - ) - self.cancel_scope._tasks.add(task) - self._tasks.add(task) - return task - - def start_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> None: - self._spawn(func, args, name) - - async def start( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> Any: - future: asyncio.Future = asyncio.Future() - task = self._spawn(func, args, name, future) - - # If the task raises an exception after sending a start value without a switch - # point between, the task group is cancelled and this method never proceeds to - # process the completed future. That's why we have to have a shielded cancel - # scope here. - try: - return await future - except CancelledError: - # Cancel the task and wait for it to exit before returning - task.cancel() - with CancelScope(shield=True), suppress(CancelledError): - await task - - raise - - -# -# Threads -# - -_Retval_Queue_Type = tuple[Optional[T_Retval], Optional[BaseException]] - - -class WorkerThread(Thread): - MAX_IDLE_TIME = 10 # seconds - - def __init__( - self, - root_task: asyncio.Task, - workers: set[WorkerThread], - idle_workers: deque[WorkerThread], - ): - super().__init__(name="AnyIO worker thread") - self.root_task = root_task - self.workers = workers - self.idle_workers = idle_workers - self.loop = root_task._loop - self.queue: Queue[ - tuple[Context, Callable, tuple, asyncio.Future, CancelScope] | None - ] = Queue(2) - self.idle_since = AsyncIOBackend.current_time() - self.stopping = False - - def _report_result( - self, future: asyncio.Future, result: Any, exc: BaseException | None - ) -> None: - self.idle_since = AsyncIOBackend.current_time() - if not self.stopping: - self.idle_workers.append(self) - - if not future.cancelled(): - if exc is not None: - if isinstance(exc, StopIteration): - new_exc = RuntimeError("coroutine raised StopIteration") - new_exc.__cause__ = exc - exc = new_exc - - future.set_exception(exc) - else: - future.set_result(result) - - def run(self) -> None: - with claim_worker_thread(AsyncIOBackend, self.loop): - while True: - item = self.queue.get() - if item is None: - # Shutdown command received - return - - context, func, args, future, cancel_scope = item - if not future.cancelled(): - result = None - exception: BaseException | None = None - threadlocals.current_cancel_scope = cancel_scope - try: - result = context.run(func, *args) - except BaseException as exc: - exception = exc - finally: - del threadlocals.current_cancel_scope - - if not self.loop.is_closed(): - self.loop.call_soon_threadsafe( - self._report_result, future, result, exception - ) - - self.queue.task_done() - - def stop(self, f: asyncio.Task | None = None) -> None: - self.stopping = True - self.queue.put_nowait(None) - self.workers.discard(self) - try: - self.idle_workers.remove(self) - except ValueError: - pass - - -_threadpool_idle_workers: RunVar[deque[WorkerThread]] = RunVar( - "_threadpool_idle_workers" -) -_threadpool_workers: RunVar[set[WorkerThread]] = 
RunVar("_threadpool_workers") - - -class BlockingPortal(abc.BlockingPortal): - def __new__(cls) -> BlockingPortal: - return object.__new__(cls) - - def __init__(self) -> None: - super().__init__() - self._loop = get_running_loop() - - def _spawn_task_from_thread( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - name: object, - future: Future[T_Retval], - ) -> None: - AsyncIOBackend.run_sync_from_thread( - partial(self._task_group.start_soon, name=name), - (self._call_func, func, args, kwargs, future), - self._loop, - ) - - -# -# Subprocesses -# - - -@dataclass(eq=False) -class StreamReaderWrapper(abc.ByteReceiveStream): - _stream: asyncio.StreamReader - - async def receive(self, max_bytes: int = 65536) -> bytes: - data = await self._stream.read(max_bytes) - if data: - return data - else: - raise EndOfStream - - async def aclose(self) -> None: - self._stream.set_exception(ClosedResourceError()) - await AsyncIOBackend.checkpoint() - - -@dataclass(eq=False) -class StreamWriterWrapper(abc.ByteSendStream): - _stream: asyncio.StreamWriter - - async def send(self, item: bytes) -> None: - self._stream.write(item) - await self._stream.drain() - - async def aclose(self) -> None: - self._stream.close() - await AsyncIOBackend.checkpoint() - - -@dataclass(eq=False) -class Process(abc.Process): - _process: asyncio.subprocess.Process - _stdin: StreamWriterWrapper | None - _stdout: StreamReaderWrapper | None - _stderr: StreamReaderWrapper | None - - async def aclose(self) -> None: - with CancelScope(shield=True) as scope: - if self._stdin: - await self._stdin.aclose() - if self._stdout: - await self._stdout.aclose() - if self._stderr: - await self._stderr.aclose() - - scope.shield = False - try: - await self.wait() - except BaseException: - scope.shield = True - self.kill() - await self.wait() - raise - - async def wait(self) -> int: - return await self._process.wait() - - def terminate(self) -> None: - self._process.terminate() - - def kill(self) -> None: - self._process.kill() - - def send_signal(self, signal: int) -> None: - self._process.send_signal(signal) - - @property - def pid(self) -> int: - return self._process.pid - - @property - def returncode(self) -> int | None: - return self._process.returncode - - @property - def stdin(self) -> abc.ByteSendStream | None: - return self._stdin - - @property - def stdout(self) -> abc.ByteReceiveStream | None: - return self._stdout - - @property - def stderr(self) -> abc.ByteReceiveStream | None: - return self._stderr - - -def _forcibly_shutdown_process_pool_on_exit( - workers: set[Process], _task: object -) -> None: - """ - Forcibly shuts down worker processes belonging to this event loop.""" - child_watcher: asyncio.AbstractChildWatcher | None = None - if sys.version_info < (3, 12): - try: - child_watcher = asyncio.get_event_loop_policy().get_child_watcher() - except NotImplementedError: - pass - - # Close as much as possible (w/o async/await) to avoid warnings - for process in workers: - if process.returncode is None: - continue - - process._stdin._stream._transport.close() # type: ignore[union-attr] - process._stdout._stream._transport.close() # type: ignore[union-attr] - process._stderr._stream._transport.close() # type: ignore[union-attr] - process.kill() - if child_watcher: - child_watcher.remove_child_handler(process.pid) - - -async def _shutdown_process_pool_on_exit(workers: set[abc.Process]) -> None: - """ - Shuts down worker processes belonging to this event 
loop. - - NOTE: this only works when the event loop was started using asyncio.run() or - anyio.run(). - - """ - process: abc.Process - try: - await sleep(math.inf) - except asyncio.CancelledError: - for process in workers: - if process.returncode is None: - process.kill() - - for process in workers: - await process.aclose() - - -# -# Sockets and networking -# - - -class StreamProtocol(asyncio.Protocol): - read_queue: deque[bytes] - read_event: asyncio.Event - write_event: asyncio.Event - exception: Exception | None = None - is_at_eof: bool = False - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - self.read_queue = deque() - self.read_event = asyncio.Event() - self.write_event = asyncio.Event() - self.write_event.set() - cast(asyncio.Transport, transport).set_write_buffer_limits(0) - - def connection_lost(self, exc: Exception | None) -> None: - if exc: - self.exception = BrokenResourceError() - self.exception.__cause__ = exc - - self.read_event.set() - self.write_event.set() - - def data_received(self, data: bytes) -> None: - # ProactorEventloop sometimes sends bytearray instead of bytes - self.read_queue.append(bytes(data)) - self.read_event.set() - - def eof_received(self) -> bool | None: - self.is_at_eof = True - self.read_event.set() - return True - - def pause_writing(self) -> None: - self.write_event = asyncio.Event() - - def resume_writing(self) -> None: - self.write_event.set() - - -class DatagramProtocol(asyncio.DatagramProtocol): - read_queue: deque[tuple[bytes, IPSockAddrType]] - read_event: asyncio.Event - write_event: asyncio.Event - exception: Exception | None = None - - def connection_made(self, transport: asyncio.BaseTransport) -> None: - self.read_queue = deque(maxlen=100) # arbitrary value - self.read_event = asyncio.Event() - self.write_event = asyncio.Event() - self.write_event.set() - - def connection_lost(self, exc: Exception | None) -> None: - self.read_event.set() - self.write_event.set() - - def datagram_received(self, data: bytes, addr: IPSockAddrType) -> None: - addr = convert_ipv6_sockaddr(addr) - self.read_queue.append((data, addr)) - self.read_event.set() - - def error_received(self, exc: Exception) -> None: - self.exception = exc - - def pause_writing(self) -> None: - self.write_event.clear() - - def resume_writing(self) -> None: - self.write_event.set() - - -class SocketStream(abc.SocketStream): - def __init__(self, transport: asyncio.Transport, protocol: StreamProtocol): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def receive(self, max_bytes: int = 65536) -> bytes: - with self._receive_guard: - if ( - not self._protocol.read_event.is_set() - and not self._transport.is_closing() - and not self._protocol.is_at_eof - ): - self._transport.resume_reading() - await self._protocol.read_event.wait() - self._transport.pause_reading() - else: - await AsyncIOBackend.checkpoint() - - try: - chunk = self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - elif self._protocol.exception: - raise self._protocol.exception from None - else: - raise EndOfStream from None - - if len(chunk) > max_bytes: - # Split the oversized chunk - chunk, leftover = chunk[:max_bytes], chunk[max_bytes:] - self._protocol.read_queue.appendleft(leftover) - - # If 
the read queue is empty, clear the flag so that the next call will - # block until data is available - if not self._protocol.read_queue: - self._protocol.read_event.clear() - - return chunk - - async def send(self, item: bytes) -> None: - with self._send_guard: - await AsyncIOBackend.checkpoint() - - if self._closed: - raise ClosedResourceError - elif self._protocol.exception is not None: - raise self._protocol.exception - - try: - self._transport.write(item) - except RuntimeError as exc: - if self._transport.is_closing(): - raise BrokenResourceError from exc - else: - raise - - await self._protocol.write_event.wait() - - async def send_eof(self) -> None: - try: - self._transport.write_eof() - except OSError: - pass - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - try: - self._transport.write_eof() - except OSError: - pass - - self._transport.close() - await sleep(0) - self._transport.abort() - - -class _RawSocketMixin: - _receive_future: asyncio.Future | None = None - _send_future: asyncio.Future | None = None - _closing = False - - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - def _wait_until_readable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: - def callback(f: object) -> None: - del self._receive_future - loop.remove_reader(self.__raw_socket) - - f = self._receive_future = asyncio.Future() - loop.add_reader(self.__raw_socket, f.set_result, None) - f.add_done_callback(callback) - return f - - def _wait_until_writable(self, loop: asyncio.AbstractEventLoop) -> asyncio.Future: - def callback(f: object) -> None: - del self._send_future - loop.remove_writer(self.__raw_socket) - - f = self._send_future = asyncio.Future() - loop.add_writer(self.__raw_socket, f.set_result, None) - f.add_done_callback(callback) - return f - - async def aclose(self) -> None: - if not self._closing: - self._closing = True - if self.__raw_socket.fileno() != -1: - self.__raw_socket.close() - - if self._receive_future: - self._receive_future.set_result(None) - if self._send_future: - self._send_future.set_result(None) - - -class UNIXSocketStream(_RawSocketMixin, abc.UNIXSocketStream): - async def send_eof(self) -> None: - with self._send_guard: - self._raw_socket.shutdown(socket.SHUT_WR) - - async def receive(self, max_bytes: int = 65536) -> bytes: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - data = self._raw_socket.recv(max_bytes) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - if not data: - raise EndOfStream - - return data - - async def send(self, item: bytes) -> None: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._send_guard: - view = memoryview(item) - while view: - try: - bytes_sent = self._raw_socket.send(view) - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - view = view[bytes_sent:] - - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - if not isinstance(msglen, int) or msglen < 0: - raise 
ValueError("msglen must be a non-negative integer") - if not isinstance(maxfds, int) or maxfds < 1: - raise ValueError("maxfds must be a positive integer") - - loop = get_running_loop() - fds = array.array("i") - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - message, ancdata, flags, addr = self._raw_socket.recvmsg( - msglen, socket.CMSG_LEN(maxfds * fds.itemsize) - ) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - if not message and not ancdata: - raise EndOfStream - - break - - for cmsg_level, cmsg_type, cmsg_data in ancdata: - if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: - raise RuntimeError( - f"Received unexpected ancillary data; message = {message!r}, " - f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" - ) - - fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) - - return message, list(fds) - - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - if not message: - raise ValueError("message must not be empty") - if not fds: - raise ValueError("fds must not be empty") - - loop = get_running_loop() - filenos: list[int] = [] - for fd in fds: - if isinstance(fd, int): - filenos.append(fd) - elif isinstance(fd, IOBase): - filenos.append(fd.fileno()) - - fdarray = array.array("i", filenos) - await AsyncIOBackend.checkpoint() - with self._send_guard: - while True: - try: - # The ignore can be removed after mypy picks up - # https://github.com/python/typeshed/pull/5545 - self._raw_socket.sendmsg( - [message], [(socket.SOL_SOCKET, socket.SCM_RIGHTS, fdarray)] - ) - break - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - - -class TCPSocketListener(abc.SocketListener): - _accept_scope: CancelScope | None = None - _closed = False - - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._loop = cast(asyncio.BaseEventLoop, get_running_loop()) - self._accept_guard = ResourceGuard("accepting connections from") - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - async def accept(self) -> abc.SocketStream: - if self._closed: - raise ClosedResourceError - - with self._accept_guard: - await AsyncIOBackend.checkpoint() - with CancelScope() as self._accept_scope: - try: - client_sock, _addr = await self._loop.sock_accept(self._raw_socket) - except asyncio.CancelledError: - # Workaround for https://bugs.python.org/issue41317 - try: - self._loop.remove_reader(self._raw_socket) - except (ValueError, NotImplementedError): - pass - - if self._closed: - raise ClosedResourceError from None - - raise - finally: - self._accept_scope = None - - client_sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - transport, protocol = await self._loop.connect_accepted_socket( - StreamProtocol, client_sock - ) - return SocketStream(transport, protocol) - - async def aclose(self) -> None: - if self._closed: - return - - self._closed = True - if self._accept_scope: - # Workaround for https://bugs.python.org/issue41317 - try: - self._loop.remove_reader(self._raw_socket) - except (ValueError, NotImplementedError): - pass - - self._accept_scope.cancel() - await sleep(0) - - self._raw_socket.close() - - -class 
UNIXSocketListener(abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - self.__raw_socket = raw_socket - self._loop = get_running_loop() - self._accept_guard = ResourceGuard("accepting connections from") - self._closed = False - - async def accept(self) -> abc.SocketStream: - await AsyncIOBackend.checkpoint() - with self._accept_guard: - while True: - try: - client_sock, _ = self.__raw_socket.accept() - client_sock.setblocking(False) - return UNIXSocketStream(client_sock) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - self._loop.add_reader(self.__raw_socket, f.set_result, None) - f.add_done_callback( - lambda _: self._loop.remove_reader(self.__raw_socket) - ) - await f - except OSError as exc: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - - async def aclose(self) -> None: - self._closed = True - self.__raw_socket.close() - - @property - def _raw_socket(self) -> socket.socket: - return self.__raw_socket - - -class UDPSocket(abc.UDPSocket): - def __init__( - self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol - ): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - self._transport.close() - - async def receive(self) -> tuple[bytes, IPSockAddrType]: - with self._receive_guard: - await AsyncIOBackend.checkpoint() - - # If the buffer is empty, ask for more data - if not self._protocol.read_queue and not self._transport.is_closing(): - self._protocol.read_event.clear() - await self._protocol.read_event.wait() - - try: - return self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from None - - async def send(self, item: UDPPacketType) -> None: - with self._send_guard: - await AsyncIOBackend.checkpoint() - await self._protocol.write_event.wait() - if self._closed: - raise ClosedResourceError - elif self._transport.is_closing(): - raise BrokenResourceError - else: - self._transport.sendto(*item) - - -class ConnectedUDPSocket(abc.ConnectedUDPSocket): - def __init__( - self, transport: asyncio.DatagramTransport, protocol: DatagramProtocol - ): - self._transport = transport - self._protocol = protocol - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - self._closed = False - - @property - def _raw_socket(self) -> socket.socket: - return self._transport.get_extra_info("socket") - - async def aclose(self) -> None: - if not self._transport.is_closing(): - self._closed = True - self._transport.close() - - async def receive(self) -> bytes: - with self._receive_guard: - await AsyncIOBackend.checkpoint() - - # If the buffer is empty, ask for more data - if not self._protocol.read_queue and not self._transport.is_closing(): - self._protocol.read_event.clear() - await self._protocol.read_event.wait() - - try: - packet = self._protocol.read_queue.popleft() - except IndexError: - if self._closed: - raise ClosedResourceError from None - else: - raise BrokenResourceError from None - - return packet[0] - - async def send(self, item: bytes) -> None: - with self._send_guard: - await AsyncIOBackend.checkpoint() - await 
self._protocol.write_event.wait() - if self._closed: - raise ClosedResourceError - elif self._transport.is_closing(): - raise BrokenResourceError - else: - self._transport.sendto(item) - - -class UNIXDatagramSocket(_RawSocketMixin, abc.UNIXDatagramSocket): - async def receive(self) -> UNIXDatagramPacketType: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - data = self._raw_socket.recvfrom(65536) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return data - - async def send(self, item: UNIXDatagramPacketType) -> None: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._send_guard: - while True: - try: - self._raw_socket.sendto(*item) - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return - - -class ConnectedUNIXDatagramSocket(_RawSocketMixin, abc.ConnectedUNIXDatagramSocket): - async def receive(self) -> bytes: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._receive_guard: - while True: - try: - data = self._raw_socket.recv(65536) - except BlockingIOError: - await self._wait_until_readable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return data - - async def send(self, item: bytes) -> None: - loop = get_running_loop() - await AsyncIOBackend.checkpoint() - with self._send_guard: - while True: - try: - self._raw_socket.send(item) - except BlockingIOError: - await self._wait_until_writable(loop) - except OSError as exc: - if self._closing: - raise ClosedResourceError from None - else: - raise BrokenResourceError from exc - else: - return - - -_read_events: RunVar[dict[Any, asyncio.Event]] = RunVar("read_events") -_write_events: RunVar[dict[Any, asyncio.Event]] = RunVar("write_events") - - -# -# Synchronization -# - - -class Event(BaseEvent): - def __new__(cls) -> Event: - return object.__new__(cls) - - def __init__(self) -> None: - self._event = asyncio.Event() - - def set(self) -> None: - self._event.set() - - def is_set(self) -> bool: - return self._event.is_set() - - async def wait(self) -> None: - if self.is_set(): - await AsyncIOBackend.checkpoint() - else: - await self._event.wait() - - def statistics(self) -> EventStatistics: - return EventStatistics(len(self._event._waiters)) - - -class Lock(BaseLock): - def __new__(cls, *, fast_acquire: bool = False) -> Lock: - return object.__new__(cls) - - def __init__(self, *, fast_acquire: bool = False) -> None: - self._fast_acquire = fast_acquire - self._owner_task: asyncio.Task | None = None - self._waiters: deque[tuple[asyncio.Task, asyncio.Future]] = deque() - - async def acquire(self) -> None: - if self._owner_task is None and not self._waiters: - await AsyncIOBackend.checkpoint_if_cancelled() - self._owner_task = current_task() - - # Unless on the "fast path", yield control of the event loop so that other - # tasks can run too - if not self._fast_acquire: - try: - await AsyncIOBackend.cancel_shielded_checkpoint() - except CancelledError: - self.release() - raise - - return - - task = cast(asyncio.Task, current_task()) - fut: asyncio.Future[None] = asyncio.Future() - item = task, fut - self._waiters.append(item) - try: - 
await fut - except CancelledError: - self._waiters.remove(item) - if self._owner_task is task: - self.release() - - raise - - self._waiters.remove(item) - - def acquire_nowait(self) -> None: - if self._owner_task is None and not self._waiters: - self._owner_task = current_task() - return - - raise WouldBlock - - def locked(self) -> bool: - return self._owner_task is not None - - def release(self) -> None: - if self._owner_task != current_task(): - raise RuntimeError("The current task is not holding this lock") - - for task, fut in self._waiters: - if not fut.cancelled(): - self._owner_task = task - fut.set_result(None) - return - - self._owner_task = None - - def statistics(self) -> LockStatistics: - task_info = AsyncIOTaskInfo(self._owner_task) if self._owner_task else None - return LockStatistics(self.locked(), task_info, len(self._waiters)) - - -class Semaphore(BaseSemaphore): - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - return object.__new__(cls) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ): - super().__init__(initial_value, max_value=max_value) - self._value = initial_value - self._max_value = max_value - self._fast_acquire = fast_acquire - self._waiters: deque[asyncio.Future[None]] = deque() - - async def acquire(self) -> None: - if self._value > 0 and not self._waiters: - await AsyncIOBackend.checkpoint_if_cancelled() - self._value -= 1 - - # Unless on the "fast path", yield control of the event loop so that other - # tasks can run too - if not self._fast_acquire: - try: - await AsyncIOBackend.cancel_shielded_checkpoint() - except CancelledError: - self.release() - raise - - return - - fut: asyncio.Future[None] = asyncio.Future() - self._waiters.append(fut) - try: - await fut - except CancelledError: - try: - self._waiters.remove(fut) - except ValueError: - self.release() - - raise - - def acquire_nowait(self) -> None: - if self._value == 0: - raise WouldBlock - - self._value -= 1 - - def release(self) -> None: - if self._max_value is not None and self._value == self._max_value: - raise ValueError("semaphore released too many times") - - for fut in self._waiters: - if not fut.cancelled(): - fut.set_result(None) - self._waiters.remove(fut) - return - - self._value += 1 - - @property - def value(self) -> int: - return self._value - - @property - def max_value(self) -> int | None: - return self._max_value - - def statistics(self) -> SemaphoreStatistics: - return SemaphoreStatistics(len(self._waiters)) - - -class CapacityLimiter(BaseCapacityLimiter): - _total_tokens: float = 0 - - def __new__(cls, total_tokens: float) -> CapacityLimiter: - return object.__new__(cls) - - def __init__(self, total_tokens: float): - self._borrowers: set[Any] = set() - self._wait_queue: OrderedDict[Any, asyncio.Event] = OrderedDict() - self.total_tokens = total_tokens - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - @property - def total_tokens(self) -> float: - return self._total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - if not isinstance(value, int) and not math.isinf(value): - raise TypeError("total_tokens must be an int or math.inf") - if value < 1: - raise ValueError("total_tokens must be >= 1") - - waiters_to_notify = 
max(value - self._total_tokens, 0) - self._total_tokens = value - - # Notify waiting tasks that they have acquired the limiter - while self._wait_queue and waiters_to_notify: - event = self._wait_queue.popitem(last=False)[1] - event.set() - waiters_to_notify -= 1 - - @property - def borrowed_tokens(self) -> int: - return len(self._borrowers) - - @property - def available_tokens(self) -> float: - return self._total_tokens - len(self._borrowers) - - def acquire_nowait(self) -> None: - self.acquire_on_behalf_of_nowait(current_task()) - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - if borrower in self._borrowers: - raise RuntimeError( - "this borrower is already holding one of this CapacityLimiter's " - "tokens" - ) - - if self._wait_queue or len(self._borrowers) >= self._total_tokens: - raise WouldBlock - - self._borrowers.add(borrower) - - async def acquire(self) -> None: - return await self.acquire_on_behalf_of(current_task()) - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await AsyncIOBackend.checkpoint_if_cancelled() - try: - self.acquire_on_behalf_of_nowait(borrower) - except WouldBlock: - event = asyncio.Event() - self._wait_queue[borrower] = event - try: - await event.wait() - except BaseException: - self._wait_queue.pop(borrower, None) - raise - - self._borrowers.add(borrower) - else: - try: - await AsyncIOBackend.cancel_shielded_checkpoint() - except BaseException: - self.release() - raise - - def release(self) -> None: - self.release_on_behalf_of(current_task()) - - def release_on_behalf_of(self, borrower: object) -> None: - try: - self._borrowers.remove(borrower) - except KeyError: - raise RuntimeError( - "this borrower isn't holding any of this CapacityLimiter's tokens" - ) from None - - # Notify the next task in line if this limiter has free capacity now - if self._wait_queue and len(self._borrowers) < self._total_tokens: - event = self._wait_queue.popitem(last=False)[1] - event.set() - - def statistics(self) -> CapacityLimiterStatistics: - return CapacityLimiterStatistics( - self.borrowed_tokens, - self.total_tokens, - tuple(self._borrowers), - len(self._wait_queue), - ) - - -_default_thread_limiter: RunVar[CapacityLimiter] = RunVar("_default_thread_limiter") - - -# -# Operating system signals -# - - -class _SignalReceiver: - def __init__(self, signals: tuple[Signals, ...]): - self._signals = signals - self._loop = get_running_loop() - self._signal_queue: deque[Signals] = deque() - self._future: asyncio.Future = asyncio.Future() - self._handled_signals: set[Signals] = set() - - def _deliver(self, signum: Signals) -> None: - self._signal_queue.append(signum) - if not self._future.done(): - self._future.set_result(None) - - def __enter__(self) -> _SignalReceiver: - for sig in set(self._signals): - self._loop.add_signal_handler(sig, self._deliver, sig) - self._handled_signals.add(sig) - - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - for sig in self._handled_signals: - self._loop.remove_signal_handler(sig) - return None - - def __aiter__(self) -> _SignalReceiver: - return self - - async def __anext__(self) -> Signals: - await AsyncIOBackend.checkpoint() - if not self._signal_queue: - self._future = asyncio.Future() - await self._future - - return self._signal_queue.popleft() - - -# -# Testing and debugging -# - - -class AsyncIOTaskInfo(TaskInfo): - def __init__(self, task: asyncio.Task): - task_state = 
_task_states.get(task) - if task_state is None: - parent_id = None - else: - parent_id = task_state.parent_id - - super().__init__(id(task), parent_id, task.get_name(), task.get_coro()) - self._task = weakref.ref(task) - - def has_pending_cancellation(self) -> bool: - if not (task := self._task()): - # If the task isn't around anymore, it won't have a pending cancellation - return False - - if sys.version_info >= (3, 11): - if task.cancelling(): - return True - elif ( - isinstance(task._fut_waiter, asyncio.Future) - and task._fut_waiter.cancelled() - ): - return True - - if task_state := _task_states.get(task): - if cancel_scope := task_state.cancel_scope: - return cancel_scope._effectively_cancelled - - return False - - -class TestRunner(abc.TestRunner): - _send_stream: MemoryObjectSendStream[tuple[Awaitable[Any], asyncio.Future[Any]]] - - def __init__( - self, - *, - debug: bool | None = None, - use_uvloop: bool = False, - loop_factory: Callable[[], AbstractEventLoop] | None = None, - ) -> None: - if use_uvloop and loop_factory is None: - import uvloop - - loop_factory = uvloop.new_event_loop - - self._runner = Runner(debug=debug, loop_factory=loop_factory) - self._exceptions: list[BaseException] = [] - self._runner_task: asyncio.Task | None = None - - def __enter__(self) -> TestRunner: - self._runner.__enter__() - self.get_loop().set_exception_handler(self._exception_handler) - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self._runner.__exit__(exc_type, exc_val, exc_tb) - - def get_loop(self) -> AbstractEventLoop: - return self._runner.get_loop() - - def _exception_handler( - self, loop: asyncio.AbstractEventLoop, context: dict[str, Any] - ) -> None: - if isinstance(context.get("exception"), Exception): - self._exceptions.append(context["exception"]) - else: - loop.default_exception_handler(context) - - def _raise_async_exceptions(self) -> None: - # Re-raise any exceptions raised in asynchronous callbacks - if self._exceptions: - exceptions, self._exceptions = self._exceptions, [] - if len(exceptions) == 1: - raise exceptions[0] - elif exceptions: - raise BaseExceptionGroup( - "Multiple exceptions occurred in asynchronous callbacks", exceptions - ) - - async def _run_tests_and_fixtures( - self, - receive_stream: MemoryObjectReceiveStream[ - tuple[Awaitable[T_Retval], asyncio.Future[T_Retval]] - ], - ) -> None: - from _pytest.outcomes import OutcomeException - - with receive_stream, self._send_stream: - async for coro, future in receive_stream: - try: - retval = await coro - except CancelledError as exc: - if not future.cancelled(): - future.cancel(*exc.args) - - raise - except BaseException as exc: - if not future.cancelled(): - future.set_exception(exc) - - if not isinstance(exc, (Exception, OutcomeException)): - raise - else: - if not future.cancelled(): - future.set_result(retval) - - async def _call_in_runner_task( - self, - func: Callable[P, Awaitable[T_Retval]], - *args: P.args, - **kwargs: P.kwargs, - ) -> T_Retval: - if not self._runner_task: - self._send_stream, receive_stream = create_memory_object_stream[ - tuple[Awaitable[Any], asyncio.Future] - ](1) - self._runner_task = self.get_loop().create_task( - self._run_tests_and_fixtures(receive_stream) - ) - - coro = func(*args, **kwargs) - future: asyncio.Future[T_Retval] = self.get_loop().create_future() - self._send_stream.send_nowait((coro, future)) - return await future - - def run_asyncgen_fixture( - self, - 
fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], - kwargs: dict[str, Any], - ) -> Iterable[T_Retval]: - asyncgen = fixture_func(**kwargs) - fixturevalue: T_Retval = self.get_loop().run_until_complete( - self._call_in_runner_task(asyncgen.asend, None) - ) - self._raise_async_exceptions() - - yield fixturevalue - - try: - self.get_loop().run_until_complete( - self._call_in_runner_task(asyncgen.asend, None) - ) - except StopAsyncIteration: - self._raise_async_exceptions() - else: - self.get_loop().run_until_complete(asyncgen.aclose()) - raise RuntimeError("Async generator fixture did not stop") - - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], - kwargs: dict[str, Any], - ) -> T_Retval: - retval = self.get_loop().run_until_complete( - self._call_in_runner_task(fixture_func, **kwargs) - ) - self._raise_async_exceptions() - return retval - - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - try: - self.get_loop().run_until_complete( - self._call_in_runner_task(test_func, **kwargs) - ) - except Exception as exc: - self._exceptions.append(exc) - - self._raise_async_exceptions() - - -class AsyncIOBackend(AsyncBackend): - @classmethod - def run( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - options: dict[str, Any], - ) -> T_Retval: - @wraps(func) - async def wrapper() -> T_Retval: - task = cast(asyncio.Task, current_task()) - task.set_name(get_callable_name(func)) - _task_states[task] = TaskState(None, None) - - try: - return await func(*args) - finally: - del _task_states[task] - - debug = options.get("debug", None) - loop_factory = options.get("loop_factory", None) - if loop_factory is None and options.get("use_uvloop", False): - import uvloop - - loop_factory = uvloop.new_event_loop - - with Runner(debug=debug, loop_factory=loop_factory) as runner: - return runner.run(wrapper()) - - @classmethod - def current_token(cls) -> object: - return get_running_loop() - - @classmethod - def current_time(cls) -> float: - return get_running_loop().time() - - @classmethod - def cancelled_exception_class(cls) -> type[BaseException]: - return CancelledError - - @classmethod - async def checkpoint(cls) -> None: - await sleep(0) - - @classmethod - async def checkpoint_if_cancelled(cls) -> None: - task = current_task() - if task is None: - return - - try: - cancel_scope = _task_states[task].cancel_scope - except KeyError: - return - - while cancel_scope: - if cancel_scope.cancel_called: - await sleep(0) - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - @classmethod - async def cancel_shielded_checkpoint(cls) -> None: - with CancelScope(shield=True): - await sleep(0) - - @classmethod - async def sleep(cls, delay: float) -> None: - await sleep(delay) - - @classmethod - def create_cancel_scope( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return CancelScope(deadline=deadline, shield=shield) - - @classmethod - def current_effective_deadline(cls) -> float: - try: - cancel_scope = _task_states[ - current_task() # type: ignore[index] - ].cancel_scope - except KeyError: - return math.inf - - deadline = math.inf - while cancel_scope: - deadline = min(deadline, cancel_scope.deadline) - if cancel_scope._cancel_called: - deadline = -math.inf - break - elif cancel_scope.shield: - break - else: - cancel_scope = cancel_scope._parent_scope - - return deadline - 
- @classmethod - def create_task_group(cls) -> abc.TaskGroup: - return TaskGroup() - - @classmethod - def create_event(cls) -> abc.Event: - return Event() - - @classmethod - def create_lock(cls, *, fast_acquire: bool) -> abc.Lock: - return Lock(fast_acquire=fast_acquire) - - @classmethod - def create_semaphore( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> abc.Semaphore: - return Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) - - @classmethod - def create_capacity_limiter(cls, total_tokens: float) -> abc.CapacityLimiter: - return CapacityLimiter(total_tokens) - - @classmethod - async def run_sync_in_worker_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - abandon_on_cancel: bool = False, - limiter: abc.CapacityLimiter | None = None, - ) -> T_Retval: - await cls.checkpoint() - - # If this is the first run in this event loop thread, set up the necessary - # variables - try: - idle_workers = _threadpool_idle_workers.get() - workers = _threadpool_workers.get() - except LookupError: - idle_workers = deque() - workers = set() - _threadpool_idle_workers.set(idle_workers) - _threadpool_workers.set(workers) - - async with limiter or cls.current_default_thread_limiter(): - with CancelScope(shield=not abandon_on_cancel) as scope: - future: asyncio.Future = asyncio.Future() - root_task = find_root_task() - if not idle_workers: - worker = WorkerThread(root_task, workers, idle_workers) - worker.start() - workers.add(worker) - root_task.add_done_callback(worker.stop) - else: - worker = idle_workers.pop() - - # Prune any other workers that have been idle for MAX_IDLE_TIME - # seconds or longer - now = cls.current_time() - while idle_workers: - if ( - now - idle_workers[0].idle_since - < WorkerThread.MAX_IDLE_TIME - ): - break - - expired_worker = idle_workers.popleft() - expired_worker.root_task.remove_done_callback( - expired_worker.stop - ) - expired_worker.stop() - - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, None) - if abandon_on_cancel or scope._parent_scope is None: - worker_scope = scope - else: - worker_scope = scope._parent_scope - - worker.queue.put_nowait((context, func, args, future, worker_scope)) - return await future - - @classmethod - def check_cancelled(cls) -> None: - scope: CancelScope | None = threadlocals.current_cancel_scope - while scope is not None: - if scope.cancel_called: - raise CancelledError(f"Cancelled by cancel scope {id(scope):x}") - - if scope.shield: - return - - scope = scope._parent_scope - - @classmethod - def run_async_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - async def task_wrapper(scope: CancelScope) -> T_Retval: - __tracebackhide__ = True - task = cast(asyncio.Task, current_task()) - _task_states[task] = TaskState(None, scope) - scope._tasks.add(task) - try: - return await func(*args) - except CancelledError as exc: - raise concurrent.futures.CancelledError(str(exc)) from None - finally: - scope._tasks.discard(task) - - loop = cast(AbstractEventLoop, token) - context = copy_context() - context.run(sniffio.current_async_library_cvar.set, "asyncio") - wrapper = task_wrapper(threadlocals.current_cancel_scope) - f: concurrent.futures.Future[T_Retval] = context.run( - asyncio.run_coroutine_threadsafe, wrapper, loop - ) - return f.result() - - @classmethod - def run_sync_from_thread( - cls, - func: 
Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - @wraps(func) - def wrapper() -> None: - try: - sniffio.current_async_library_cvar.set("asyncio") - f.set_result(func(*args)) - except BaseException as exc: - f.set_exception(exc) - if not isinstance(exc, Exception): - raise - - f: concurrent.futures.Future[T_Retval] = Future() - loop = cast(AbstractEventLoop, token) - loop.call_soon_threadsafe(wrapper) - return f.result() - - @classmethod - def create_blocking_portal(cls) -> abc.BlockingPortal: - return BlockingPortal() - - @classmethod - async def open_process( - cls, - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - **kwargs: Any, - ) -> Process: - await cls.checkpoint() - if isinstance(command, PathLike): - command = os.fspath(command) - - if isinstance(command, (str, bytes)): - process = await asyncio.create_subprocess_shell( - command, - stdin=stdin, - stdout=stdout, - stderr=stderr, - **kwargs, - ) - else: - process = await asyncio.create_subprocess_exec( - *command, - stdin=stdin, - stdout=stdout, - stderr=stderr, - **kwargs, - ) - - stdin_stream = StreamWriterWrapper(process.stdin) if process.stdin else None - stdout_stream = StreamReaderWrapper(process.stdout) if process.stdout else None - stderr_stream = StreamReaderWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, stdout_stream, stderr_stream) - - @classmethod - def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: - create_task( - _shutdown_process_pool_on_exit(workers), - name="AnyIO process pool shutdown task", - ) - find_root_task().add_done_callback( - partial(_forcibly_shutdown_process_pool_on_exit, workers) # type:ignore[arg-type] - ) - - @classmethod - async def connect_tcp( - cls, host: str, port: int, local_address: IPSockAddrType | None = None - ) -> abc.SocketStream: - transport, protocol = cast( - tuple[asyncio.Transport, StreamProtocol], - await get_running_loop().create_connection( - StreamProtocol, host, port, local_addr=local_address - ), - ) - transport.pause_reading() - return SocketStream(transport, protocol) - - @classmethod - async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: - await cls.checkpoint() - loop = get_running_loop() - raw_socket = socket.socket(socket.AF_UNIX) - raw_socket.setblocking(False) - while True: - try: - raw_socket.connect(path) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - loop.add_writer(raw_socket, f.set_result, None) - f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) - await f - except BaseException: - raw_socket.close() - raise - else: - return UNIXSocketStream(raw_socket) - - @classmethod - def create_tcp_listener(cls, sock: socket.socket) -> SocketListener: - return TCPSocketListener(sock) - - @classmethod - def create_unix_listener(cls, sock: socket.socket) -> SocketListener: - return UNIXSocketListener(sock) - - @classmethod - async def create_udp_socket( - cls, - family: AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, - ) -> UDPSocket | ConnectedUDPSocket: - transport, protocol = await get_running_loop().create_datagram_endpoint( - DatagramProtocol, - local_addr=local_address, - remote_addr=remote_address, - family=family, - reuse_port=reuse_port, - ) - if protocol.exception: - transport.close() - raise protocol.exception - - 
if not remote_address: - return UDPSocket(transport, protocol) - else: - return ConnectedUDPSocket(transport, protocol) - - @classmethod - async def create_unix_datagram_socket( # type: ignore[override] - cls, raw_socket: socket.socket, remote_path: str | bytes | None - ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: - await cls.checkpoint() - loop = get_running_loop() - - if remote_path: - while True: - try: - raw_socket.connect(remote_path) - except BlockingIOError: - f: asyncio.Future = asyncio.Future() - loop.add_writer(raw_socket, f.set_result, None) - f.add_done_callback(lambda _: loop.remove_writer(raw_socket)) - await f - except BaseException: - raw_socket.close() - raise - else: - return ConnectedUNIXDatagramSocket(raw_socket) - else: - return UNIXDatagramSocket(raw_socket) - - @classmethod - async def getaddrinfo( - cls, - host: bytes | str | None, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, - ) -> list[ - tuple[ - AddressFamily, - SocketKind, - int, - str, - tuple[str, int] | tuple[str, int, int, int], - ] - ]: - return await get_running_loop().getaddrinfo( - host, port, family=family, type=type, proto=proto, flags=flags - ) - - @classmethod - async def getnameinfo( - cls, sockaddr: IPSockAddrType, flags: int = 0 - ) -> tuple[str, str]: - return await get_running_loop().getnameinfo(sockaddr, flags) - - @classmethod - async def wait_socket_readable(cls, sock: socket.socket) -> None: - await cls.checkpoint() - try: - read_events = _read_events.get() - except LookupError: - read_events = {} - _read_events.set(read_events) - - if read_events.get(sock): - raise BusyResourceError("reading from") from None - - loop = get_running_loop() - event = read_events[sock] = asyncio.Event() - loop.add_reader(sock, event.set) - try: - await event.wait() - finally: - if read_events.pop(sock, None) is not None: - loop.remove_reader(sock) - readable = True - else: - readable = False - - if not readable: - raise ClosedResourceError - - @classmethod - async def wait_socket_writable(cls, sock: socket.socket) -> None: - await cls.checkpoint() - try: - write_events = _write_events.get() - except LookupError: - write_events = {} - _write_events.set(write_events) - - if write_events.get(sock): - raise BusyResourceError("writing to") from None - - loop = get_running_loop() - event = write_events[sock] = asyncio.Event() - loop.add_writer(sock.fileno(), event.set) - try: - await event.wait() - finally: - if write_events.pop(sock, None) is not None: - loop.remove_writer(sock) - writable = True - else: - writable = False - - if not writable: - raise ClosedResourceError - - @classmethod - def current_default_thread_limiter(cls) -> CapacityLimiter: - try: - return _default_thread_limiter.get() - except LookupError: - limiter = CapacityLimiter(40) - _default_thread_limiter.set(limiter) - return limiter - - @classmethod - def open_signal_receiver( - cls, *signals: Signals - ) -> AbstractContextManager[AsyncIterator[Signals]]: - return _SignalReceiver(signals) - - @classmethod - def get_current_task(cls) -> TaskInfo: - return AsyncIOTaskInfo(current_task()) # type: ignore[arg-type] - - @classmethod - def get_running_tasks(cls) -> Sequence[TaskInfo]: - return [AsyncIOTaskInfo(task) for task in all_tasks() if not task.done()] - - @classmethod - async def wait_all_tasks_blocked(cls) -> None: - await cls.checkpoint() - this_task = current_task() - while True: - for task in all_tasks(): - if task is this_task: - 
continue - - waiter = task._fut_waiter # type: ignore[attr-defined] - if waiter is None or waiter.done(): - await sleep(0.1) - break - else: - return - - @classmethod - def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: - return TestRunner(**options) - - -backend_class = AsyncIOBackend diff --git a/myenv/Lib/site-packages/anyio/_backends/_trio.py b/myenv/Lib/site-packages/anyio/_backends/_trio.py deleted file mode 100644 index de2189c..0000000 --- a/myenv/Lib/site-packages/anyio/_backends/_trio.py +++ /dev/null @@ -1,1315 +0,0 @@ -from __future__ import annotations - -import array -import math -import os -import socket -import sys -import types -import weakref -from collections.abc import ( - AsyncGenerator, - AsyncIterator, - Awaitable, - Callable, - Collection, - Coroutine, - Iterable, - Sequence, -) -from concurrent.futures import Future -from contextlib import AbstractContextManager -from dataclasses import dataclass -from functools import partial -from io import IOBase -from os import PathLike -from signal import Signals -from socket import AddressFamily, SocketKind -from types import TracebackType -from typing import ( - IO, - Any, - Generic, - NoReturn, - TypeVar, - cast, - overload, -) - -import trio.from_thread -import trio.lowlevel -from outcome import Error, Outcome, Value -from trio.lowlevel import ( - current_root_task, - current_task, - wait_readable, - wait_writable, -) -from trio.socket import SocketType as TrioSocketType -from trio.to_thread import run_sync - -from .. import ( - CapacityLimiterStatistics, - EventStatistics, - LockStatistics, - TaskInfo, - WouldBlock, - abc, -) -from .._core._eventloop import claim_worker_thread -from .._core._exceptions import ( - BrokenResourceError, - BusyResourceError, - ClosedResourceError, - EndOfStream, -) -from .._core._sockets import convert_ipv6_sockaddr -from .._core._streams import create_memory_object_stream -from .._core._synchronization import ( - CapacityLimiter as BaseCapacityLimiter, -) -from .._core._synchronization import Event as BaseEvent -from .._core._synchronization import Lock as BaseLock -from .._core._synchronization import ( - ResourceGuard, - SemaphoreStatistics, -) -from .._core._synchronization import Semaphore as BaseSemaphore -from .._core._tasks import CancelScope as BaseCancelScope -from ..abc import IPSockAddrType, UDPPacketType, UNIXDatagramPacketType -from ..abc._eventloop import AsyncBackend, StrOrBytesPath -from ..streams.memory import MemoryObjectSendStream - -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from exceptiongroup import BaseExceptionGroup - from typing_extensions import TypeVarTuple, Unpack - -T = TypeVar("T") -T_Retval = TypeVar("T_Retval") -T_SockAddr = TypeVar("T_SockAddr", str, IPSockAddrType) -PosArgsT = TypeVarTuple("PosArgsT") -P = ParamSpec("P") - - -# -# Event loop -# - -RunVar = trio.lowlevel.RunVar - - -# -# Timeouts and cancellation -# - - -class CancelScope(BaseCancelScope): - def __new__( - cls, original: trio.CancelScope | None = None, **kwargs: object - ) -> CancelScope: - return object.__new__(cls) - - def __init__(self, original: trio.CancelScope | None = None, **kwargs: Any) -> None: - self.__original = original or trio.CancelScope(**kwargs) - - def __enter__(self) -> CancelScope: - self.__original.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: 
BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - # https://github.com/python-trio/trio-typing/pull/79 - return self.__original.__exit__(exc_type, exc_val, exc_tb) - - def cancel(self) -> None: - self.__original.cancel() - - @property - def deadline(self) -> float: - return self.__original.deadline - - @deadline.setter - def deadline(self, value: float) -> None: - self.__original.deadline = value - - @property - def cancel_called(self) -> bool: - return self.__original.cancel_called - - @property - def cancelled_caught(self) -> bool: - return self.__original.cancelled_caught - - @property - def shield(self) -> bool: - return self.__original.shield - - @shield.setter - def shield(self, value: bool) -> None: - self.__original.shield = value - - -# -# Task groups -# - - -class TaskGroup(abc.TaskGroup): - def __init__(self) -> None: - self._active = False - self._nursery_manager = trio.open_nursery(strict_exception_groups=True) - self.cancel_scope = None # type: ignore[assignment] - - async def __aenter__(self) -> TaskGroup: - self._active = True - self._nursery = await self._nursery_manager.__aenter__() - self.cancel_scope = CancelScope(self._nursery.cancel_scope) - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - try: - return await self._nursery_manager.__aexit__(exc_type, exc_val, exc_tb) - except BaseExceptionGroup as exc: - _, rest = exc.split(trio.Cancelled) - if not rest: - cancelled_exc = trio.Cancelled._create() - raise cancelled_exc from exc - - raise - finally: - self._active = False - - def start_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> None: - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." - ) - - self._nursery.start_soon(func, *args, name=name) - - async def start( - self, func: Callable[..., Awaitable[Any]], *args: object, name: object = None - ) -> Any: - if not self._active: - raise RuntimeError( - "This task group is not active; no new tasks can be started." 
- ) - - return await self._nursery.start(func, *args, name=name) - - -# -# Threads -# - - -class BlockingPortal(abc.BlockingPortal): - def __new__(cls) -> BlockingPortal: - return object.__new__(cls) - - def __init__(self) -> None: - super().__init__() - self._token = trio.lowlevel.current_trio_token() - - def _spawn_task_from_thread( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - name: object, - future: Future[T_Retval], - ) -> None: - trio.from_thread.run_sync( - partial(self._task_group.start_soon, name=name), - self._call_func, - func, - args, - kwargs, - future, - trio_token=self._token, - ) - - -# -# Subprocesses -# - - -@dataclass(eq=False) -class ReceiveStreamWrapper(abc.ByteReceiveStream): - _stream: trio.abc.ReceiveStream - - async def receive(self, max_bytes: int | None = None) -> bytes: - try: - data = await self._stream.receive_some(max_bytes) - except trio.ClosedResourceError as exc: - raise ClosedResourceError from exc.__cause__ - except trio.BrokenResourceError as exc: - raise BrokenResourceError from exc.__cause__ - - if data: - return data - else: - raise EndOfStream - - async def aclose(self) -> None: - await self._stream.aclose() - - -@dataclass(eq=False) -class SendStreamWrapper(abc.ByteSendStream): - _stream: trio.abc.SendStream - - async def send(self, item: bytes) -> None: - try: - await self._stream.send_all(item) - except trio.ClosedResourceError as exc: - raise ClosedResourceError from exc.__cause__ - except trio.BrokenResourceError as exc: - raise BrokenResourceError from exc.__cause__ - - async def aclose(self) -> None: - await self._stream.aclose() - - -@dataclass(eq=False) -class Process(abc.Process): - _process: trio.Process - _stdin: abc.ByteSendStream | None - _stdout: abc.ByteReceiveStream | None - _stderr: abc.ByteReceiveStream | None - - async def aclose(self) -> None: - with CancelScope(shield=True): - if self._stdin: - await self._stdin.aclose() - if self._stdout: - await self._stdout.aclose() - if self._stderr: - await self._stderr.aclose() - - try: - await self.wait() - except BaseException: - self.kill() - with CancelScope(shield=True): - await self.wait() - raise - - async def wait(self) -> int: - return await self._process.wait() - - def terminate(self) -> None: - self._process.terminate() - - def kill(self) -> None: - self._process.kill() - - def send_signal(self, signal: Signals) -> None: - self._process.send_signal(signal) - - @property - def pid(self) -> int: - return self._process.pid - - @property - def returncode(self) -> int | None: - return self._process.returncode - - @property - def stdin(self) -> abc.ByteSendStream | None: - return self._stdin - - @property - def stdout(self) -> abc.ByteReceiveStream | None: - return self._stdout - - @property - def stderr(self) -> abc.ByteReceiveStream | None: - return self._stderr - - -class _ProcessPoolShutdownInstrument(trio.abc.Instrument): - def after_run(self) -> None: - super().after_run() - - -current_default_worker_process_limiter: trio.lowlevel.RunVar = RunVar( - "current_default_worker_process_limiter" -) - - -async def _shutdown_process_pool(workers: set[abc.Process]) -> None: - try: - await trio.sleep(math.inf) - except trio.Cancelled: - for process in workers: - if process.returncode is None: - process.kill() - - with CancelScope(shield=True): - for process in workers: - await process.aclose() - - -# -# Sockets and networking -# - - -class _TrioSocketMixin(Generic[T_SockAddr]): - def 
__init__(self, trio_socket: TrioSocketType) -> None: - self._trio_socket = trio_socket - self._closed = False - - def _check_closed(self) -> None: - if self._closed: - raise ClosedResourceError - if self._trio_socket.fileno() < 0: - raise BrokenResourceError - - @property - def _raw_socket(self) -> socket.socket: - return self._trio_socket._sock # type: ignore[attr-defined] - - async def aclose(self) -> None: - if self._trio_socket.fileno() >= 0: - self._closed = True - self._trio_socket.close() - - def _convert_socket_error(self, exc: BaseException) -> NoReturn: - if isinstance(exc, trio.ClosedResourceError): - raise ClosedResourceError from exc - elif self._trio_socket.fileno() < 0 and self._closed: - raise ClosedResourceError from None - elif isinstance(exc, OSError): - raise BrokenResourceError from exc - else: - raise exc - - -class SocketStream(_TrioSocketMixin, abc.SocketStream): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self, max_bytes: int = 65536) -> bytes: - with self._receive_guard: - try: - data = await self._trio_socket.recv(max_bytes) - except BaseException as exc: - self._convert_socket_error(exc) - - if data: - return data - else: - raise EndOfStream - - async def send(self, item: bytes) -> None: - with self._send_guard: - view = memoryview(item) - while view: - try: - bytes_sent = await self._trio_socket.send(view) - except BaseException as exc: - self._convert_socket_error(exc) - - view = view[bytes_sent:] - - async def send_eof(self) -> None: - self._trio_socket.shutdown(socket.SHUT_WR) - - -class UNIXSocketStream(SocketStream, abc.UNIXSocketStream): - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - if not isinstance(msglen, int) or msglen < 0: - raise ValueError("msglen must be a non-negative integer") - if not isinstance(maxfds, int) or maxfds < 1: - raise ValueError("maxfds must be a positive integer") - - fds = array.array("i") - await trio.lowlevel.checkpoint() - with self._receive_guard: - while True: - try: - message, ancdata, flags, addr = await self._trio_socket.recvmsg( - msglen, socket.CMSG_LEN(maxfds * fds.itemsize) - ) - except BaseException as exc: - self._convert_socket_error(exc) - else: - if not message and not ancdata: - raise EndOfStream - - break - - for cmsg_level, cmsg_type, cmsg_data in ancdata: - if cmsg_level != socket.SOL_SOCKET or cmsg_type != socket.SCM_RIGHTS: - raise RuntimeError( - f"Received unexpected ancillary data; message = {message!r}, " - f"cmsg_level = {cmsg_level}, cmsg_type = {cmsg_type}" - ) - - fds.frombytes(cmsg_data[: len(cmsg_data) - (len(cmsg_data) % fds.itemsize)]) - - return message, list(fds) - - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - if not message: - raise ValueError("message must not be empty") - if not fds: - raise ValueError("fds must not be empty") - - filenos: list[int] = [] - for fd in fds: - if isinstance(fd, int): - filenos.append(fd) - elif isinstance(fd, IOBase): - filenos.append(fd.fileno()) - - fdarray = array.array("i", filenos) - await trio.lowlevel.checkpoint() - with self._send_guard: - while True: - try: - await self._trio_socket.sendmsg( - [message], - [ - ( - socket.SOL_SOCKET, - socket.SCM_RIGHTS, - fdarray, - ) - ], - ) - break - except BaseException as exc: - self._convert_socket_error(exc) - - -class TCPSocketListener(_TrioSocketMixin, 
abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - super().__init__(trio.socket.from_stdlib_socket(raw_socket)) - self._accept_guard = ResourceGuard("accepting connections from") - - async def accept(self) -> SocketStream: - with self._accept_guard: - try: - trio_socket, _addr = await self._trio_socket.accept() - except BaseException as exc: - self._convert_socket_error(exc) - - trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - return SocketStream(trio_socket) - - -class UNIXSocketListener(_TrioSocketMixin, abc.SocketListener): - def __init__(self, raw_socket: socket.socket): - super().__init__(trio.socket.from_stdlib_socket(raw_socket)) - self._accept_guard = ResourceGuard("accepting connections from") - - async def accept(self) -> UNIXSocketStream: - with self._accept_guard: - try: - trio_socket, _addr = await self._trio_socket.accept() - except BaseException as exc: - self._convert_socket_error(exc) - - return UNIXSocketStream(trio_socket) - - -class UDPSocket(_TrioSocketMixin[IPSockAddrType], abc.UDPSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> tuple[bytes, IPSockAddrType]: - with self._receive_guard: - try: - data, addr = await self._trio_socket.recvfrom(65536) - return data, convert_ipv6_sockaddr(addr) - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: UDPPacketType) -> None: - with self._send_guard: - try: - await self._trio_socket.sendto(*item) - except BaseException as exc: - self._convert_socket_error(exc) - - -class ConnectedUDPSocket(_TrioSocketMixin[IPSockAddrType], abc.ConnectedUDPSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> bytes: - with self._receive_guard: - try: - return await self._trio_socket.recv(65536) - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: bytes) -> None: - with self._send_guard: - try: - await self._trio_socket.send(item) - except BaseException as exc: - self._convert_socket_error(exc) - - -class UNIXDatagramSocket(_TrioSocketMixin[str], abc.UNIXDatagramSocket): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> UNIXDatagramPacketType: - with self._receive_guard: - try: - data, addr = await self._trio_socket.recvfrom(65536) - return data, addr - except BaseException as exc: - self._convert_socket_error(exc) - - async def send(self, item: UNIXDatagramPacketType) -> None: - with self._send_guard: - try: - await self._trio_socket.sendto(*item) - except BaseException as exc: - self._convert_socket_error(exc) - - -class ConnectedUNIXDatagramSocket( - _TrioSocketMixin[str], abc.ConnectedUNIXDatagramSocket -): - def __init__(self, trio_socket: TrioSocketType) -> None: - super().__init__(trio_socket) - self._receive_guard = ResourceGuard("reading from") - self._send_guard = ResourceGuard("writing to") - - async def receive(self) -> bytes: - with self._receive_guard: - try: - return await self._trio_socket.recv(65536) - except BaseException as exc: - self._convert_socket_error(exc) - - async def 
send(self, item: bytes) -> None: - with self._send_guard: - try: - await self._trio_socket.send(item) - except BaseException as exc: - self._convert_socket_error(exc) - - -# -# Synchronization -# - - -class Event(BaseEvent): - def __new__(cls) -> Event: - return object.__new__(cls) - - def __init__(self) -> None: - self.__original = trio.Event() - - def is_set(self) -> bool: - return self.__original.is_set() - - async def wait(self) -> None: - return await self.__original.wait() - - def statistics(self) -> EventStatistics: - orig_statistics = self.__original.statistics() - return EventStatistics(tasks_waiting=orig_statistics.tasks_waiting) - - def set(self) -> None: - self.__original.set() - - -class Lock(BaseLock): - def __new__(cls, *, fast_acquire: bool = False) -> Lock: - return object.__new__(cls) - - def __init__(self, *, fast_acquire: bool = False) -> None: - self._fast_acquire = fast_acquire - self.__original = trio.Lock() - - async def acquire(self) -> None: - if not self._fast_acquire: - await self.__original.acquire() - return - - # This is the "fast path" where we don't let other tasks run - await trio.lowlevel.checkpoint_if_cancelled() - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - await self.__original._lot.park() - - def acquire_nowait(self) -> None: - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - raise WouldBlock from None - - def locked(self) -> bool: - return self.__original.locked() - - def release(self) -> None: - self.__original.release() - - def statistics(self) -> LockStatistics: - orig_statistics = self.__original.statistics() - owner = TrioTaskInfo(orig_statistics.owner) if orig_statistics.owner else None - return LockStatistics( - orig_statistics.locked, owner, orig_statistics.tasks_waiting - ) - - -class Semaphore(BaseSemaphore): - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - return object.__new__(cls) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> None: - super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) - self.__original = trio.Semaphore(initial_value, max_value=max_value) - - async def acquire(self) -> None: - if not self._fast_acquire: - await self.__original.acquire() - return - - # This is the "fast path" where we don't let other tasks run - await trio.lowlevel.checkpoint_if_cancelled() - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - await self.__original._lot.park() - - def acquire_nowait(self) -> None: - try: - self.__original.acquire_nowait() - except trio.WouldBlock: - raise WouldBlock from None - - @property - def max_value(self) -> int | None: - return self.__original.max_value - - @property - def value(self) -> int: - return self.__original.value - - def release(self) -> None: - self.__original.release() - - def statistics(self) -> SemaphoreStatistics: - orig_statistics = self.__original.statistics() - return SemaphoreStatistics(orig_statistics.tasks_waiting) - - -class CapacityLimiter(BaseCapacityLimiter): - def __new__( - cls, - total_tokens: float | None = None, - *, - original: trio.CapacityLimiter | None = None, - ) -> CapacityLimiter: - return object.__new__(cls) - - def __init__( - self, - total_tokens: float | None = None, - *, - original: trio.CapacityLimiter | None = None, - ) -> None: - if original is not None: - self.__original = original - else: - assert total_tokens is not None - 
self.__original = trio.CapacityLimiter(total_tokens) - - async def __aenter__(self) -> None: - return await self.__original.__aenter__() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.__original.__aexit__(exc_type, exc_val, exc_tb) - - @property - def total_tokens(self) -> float: - return self.__original.total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - self.__original.total_tokens = value - - @property - def borrowed_tokens(self) -> int: - return self.__original.borrowed_tokens - - @property - def available_tokens(self) -> float: - return self.__original.available_tokens - - def acquire_nowait(self) -> None: - self.__original.acquire_nowait() - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - self.__original.acquire_on_behalf_of_nowait(borrower) - - async def acquire(self) -> None: - await self.__original.acquire() - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await self.__original.acquire_on_behalf_of(borrower) - - def release(self) -> None: - return self.__original.release() - - def release_on_behalf_of(self, borrower: object) -> None: - return self.__original.release_on_behalf_of(borrower) - - def statistics(self) -> CapacityLimiterStatistics: - orig = self.__original.statistics() - return CapacityLimiterStatistics( - borrowed_tokens=orig.borrowed_tokens, - total_tokens=orig.total_tokens, - borrowers=tuple(orig.borrowers), - tasks_waiting=orig.tasks_waiting, - ) - - -_capacity_limiter_wrapper: trio.lowlevel.RunVar = RunVar("_capacity_limiter_wrapper") - - -# -# Signal handling -# - - -class _SignalReceiver: - _iterator: AsyncIterator[int] - - def __init__(self, signals: tuple[Signals, ...]): - self._signals = signals - - def __enter__(self) -> _SignalReceiver: - self._cm = trio.open_signal_receiver(*self._signals) - self._iterator = self._cm.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return self._cm.__exit__(exc_type, exc_val, exc_tb) - - def __aiter__(self) -> _SignalReceiver: - return self - - async def __anext__(self) -> Signals: - signum = await self._iterator.__anext__() - return Signals(signum) - - -# -# Testing and debugging -# - - -class TestRunner(abc.TestRunner): - def __init__(self, **options: Any) -> None: - from queue import Queue - - self._call_queue: Queue[Callable[[], object]] = Queue() - self._send_stream: MemoryObjectSendStream | None = None - self._options = options - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: types.TracebackType | None, - ) -> None: - if self._send_stream: - self._send_stream.close() - while self._send_stream is not None: - self._call_queue.get()() - - async def _run_tests_and_fixtures(self) -> None: - self._send_stream, receive_stream = create_memory_object_stream(1) - with receive_stream: - async for coro, outcome_holder in receive_stream: - try: - retval = await coro - except BaseException as exc: - outcome_holder.append(Error(exc)) - else: - outcome_holder.append(Value(retval)) - - def _main_task_finished(self, outcome: object) -> None: - self._send_stream = None - - def _call_in_runner_task( - self, - func: Callable[P, Awaitable[T_Retval]], - *args: P.args, - **kwargs: P.kwargs, - ) -> T_Retval: - if self._send_stream is None: - 
trio.lowlevel.start_guest_run( - self._run_tests_and_fixtures, - run_sync_soon_threadsafe=self._call_queue.put, - done_callback=self._main_task_finished, - **self._options, - ) - while self._send_stream is None: - self._call_queue.get()() - - outcome_holder: list[Outcome] = [] - self._send_stream.send_nowait((func(*args, **kwargs), outcome_holder)) - while not outcome_holder: - self._call_queue.get()() - - return outcome_holder[0].unwrap() - - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[T_Retval, Any]], - kwargs: dict[str, Any], - ) -> Iterable[T_Retval]: - asyncgen = fixture_func(**kwargs) - fixturevalue: T_Retval = self._call_in_runner_task(asyncgen.asend, None) - - yield fixturevalue - - try: - self._call_in_runner_task(asyncgen.asend, None) - except StopAsyncIteration: - pass - else: - self._call_in_runner_task(asyncgen.aclose) - raise RuntimeError("Async generator fixture did not stop") - - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, T_Retval]], - kwargs: dict[str, Any], - ) -> T_Retval: - return self._call_in_runner_task(fixture_func, **kwargs) - - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - self._call_in_runner_task(test_func, **kwargs) - - -class TrioTaskInfo(TaskInfo): - def __init__(self, task: trio.lowlevel.Task): - parent_id = None - if task.parent_nursery and task.parent_nursery.parent_task: - parent_id = id(task.parent_nursery.parent_task) - - super().__init__(id(task), parent_id, task.name, task.coro) - self._task = weakref.proxy(task) - - def has_pending_cancellation(self) -> bool: - try: - return self._task._cancel_status.effectively_cancelled - except ReferenceError: - # If the task is no longer around, it surely doesn't have a cancellation - # pending - return False - - -class TrioBackend(AsyncBackend): - @classmethod - def run( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - options: dict[str, Any], - ) -> T_Retval: - return trio.run(func, *args) - - @classmethod - def current_token(cls) -> object: - return trio.lowlevel.current_trio_token() - - @classmethod - def current_time(cls) -> float: - return trio.current_time() - - @classmethod - def cancelled_exception_class(cls) -> type[BaseException]: - return trio.Cancelled - - @classmethod - async def checkpoint(cls) -> None: - await trio.lowlevel.checkpoint() - - @classmethod - async def checkpoint_if_cancelled(cls) -> None: - await trio.lowlevel.checkpoint_if_cancelled() - - @classmethod - async def cancel_shielded_checkpoint(cls) -> None: - await trio.lowlevel.cancel_shielded_checkpoint() - - @classmethod - async def sleep(cls, delay: float) -> None: - await trio.sleep(delay) - - @classmethod - def create_cancel_scope( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> abc.CancelScope: - return CancelScope(deadline=deadline, shield=shield) - - @classmethod - def current_effective_deadline(cls) -> float: - return trio.current_effective_deadline() - - @classmethod - def create_task_group(cls) -> abc.TaskGroup: - return TaskGroup() - - @classmethod - def create_event(cls) -> abc.Event: - return Event() - - @classmethod - def create_lock(cls, *, fast_acquire: bool) -> Lock: - return Lock(fast_acquire=fast_acquire) - - @classmethod - def create_semaphore( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> abc.Semaphore: - return 
Semaphore(initial_value, max_value=max_value, fast_acquire=fast_acquire) - - @classmethod - def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: - return CapacityLimiter(total_tokens) - - @classmethod - async def run_sync_in_worker_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - abandon_on_cancel: bool = False, - limiter: abc.CapacityLimiter | None = None, - ) -> T_Retval: - def wrapper() -> T_Retval: - with claim_worker_thread(TrioBackend, token): - return func(*args) - - token = TrioBackend.current_token() - return await run_sync( - wrapper, - abandon_on_cancel=abandon_on_cancel, - limiter=cast(trio.CapacityLimiter, limiter), - ) - - @classmethod - def check_cancelled(cls) -> None: - trio.from_thread.check_cancelled() - - @classmethod - def run_async_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - return trio.from_thread.run(func, *args) - - @classmethod - def run_sync_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - return trio.from_thread.run_sync(func, *args) - - @classmethod - def create_blocking_portal(cls) -> abc.BlockingPortal: - return BlockingPortal() - - @classmethod - async def open_process( - cls, - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - **kwargs: Any, - ) -> Process: - def convert_item(item: StrOrBytesPath) -> str: - str_or_bytes = os.fspath(item) - if isinstance(str_or_bytes, str): - return str_or_bytes - else: - return os.fsdecode(str_or_bytes) - - if isinstance(command, (str, bytes, PathLike)): - process = await trio.lowlevel.open_process( - convert_item(command), - stdin=stdin, - stdout=stdout, - stderr=stderr, - shell=True, - **kwargs, - ) - else: - process = await trio.lowlevel.open_process( - [convert_item(item) for item in command], - stdin=stdin, - stdout=stdout, - stderr=stderr, - shell=False, - **kwargs, - ) - - stdin_stream = SendStreamWrapper(process.stdin) if process.stdin else None - stdout_stream = ReceiveStreamWrapper(process.stdout) if process.stdout else None - stderr_stream = ReceiveStreamWrapper(process.stderr) if process.stderr else None - return Process(process, stdin_stream, stdout_stream, stderr_stream) - - @classmethod - def setup_process_pool_exit_at_shutdown(cls, workers: set[abc.Process]) -> None: - trio.lowlevel.spawn_system_task(_shutdown_process_pool, workers) - - @classmethod - async def connect_tcp( - cls, host: str, port: int, local_address: IPSockAddrType | None = None - ) -> SocketStream: - family = socket.AF_INET6 if ":" in host else socket.AF_INET - trio_socket = trio.socket.socket(family) - trio_socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - if local_address: - await trio_socket.bind(local_address) - - try: - await trio_socket.connect((host, port)) - except BaseException: - trio_socket.close() - raise - - return SocketStream(trio_socket) - - @classmethod - async def connect_unix(cls, path: str | bytes) -> abc.UNIXSocketStream: - trio_socket = trio.socket.socket(socket.AF_UNIX) - try: - await trio_socket.connect(path) - except BaseException: - trio_socket.close() - raise - - return UNIXSocketStream(trio_socket) - - @classmethod - def create_tcp_listener(cls, sock: socket.socket) -> abc.SocketListener: - return TCPSocketListener(sock) - - @classmethod - 
def create_unix_listener(cls, sock: socket.socket) -> abc.SocketListener: - return UNIXSocketListener(sock) - - @classmethod - async def create_udp_socket( - cls, - family: socket.AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, - ) -> UDPSocket | ConnectedUDPSocket: - trio_socket = trio.socket.socket(family=family, type=socket.SOCK_DGRAM) - - if reuse_port: - trio_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - - if local_address: - await trio_socket.bind(local_address) - - if remote_address: - await trio_socket.connect(remote_address) - return ConnectedUDPSocket(trio_socket) - else: - return UDPSocket(trio_socket) - - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket.socket, remote_path: None - ) -> abc.UNIXDatagramSocket: ... - - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket.socket, remote_path: str | bytes - ) -> abc.ConnectedUNIXDatagramSocket: ... - - @classmethod - async def create_unix_datagram_socket( - cls, raw_socket: socket.socket, remote_path: str | bytes | None - ) -> abc.UNIXDatagramSocket | abc.ConnectedUNIXDatagramSocket: - trio_socket = trio.socket.from_stdlib_socket(raw_socket) - - if remote_path: - await trio_socket.connect(remote_path) - return ConnectedUNIXDatagramSocket(trio_socket) - else: - return UNIXDatagramSocket(trio_socket) - - @classmethod - async def getaddrinfo( - cls, - host: bytes | str | None, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, - ) -> list[ - tuple[ - AddressFamily, - SocketKind, - int, - str, - tuple[str, int] | tuple[str, int, int, int], - ] - ]: - return await trio.socket.getaddrinfo(host, port, family, type, proto, flags) - - @classmethod - async def getnameinfo( - cls, sockaddr: IPSockAddrType, flags: int = 0 - ) -> tuple[str, str]: - return await trio.socket.getnameinfo(sockaddr, flags) - - @classmethod - async def wait_socket_readable(cls, sock: socket.socket) -> None: - try: - await wait_readable(sock) - except trio.ClosedResourceError as exc: - raise ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("reading from") from None - - @classmethod - async def wait_socket_writable(cls, sock: socket.socket) -> None: - try: - await wait_writable(sock) - except trio.ClosedResourceError as exc: - raise ClosedResourceError().with_traceback(exc.__traceback__) from None - except trio.BusyResourceError: - raise BusyResourceError("writing to") from None - - @classmethod - def current_default_thread_limiter(cls) -> CapacityLimiter: - try: - return _capacity_limiter_wrapper.get() - except LookupError: - limiter = CapacityLimiter( - original=trio.to_thread.current_default_thread_limiter() - ) - _capacity_limiter_wrapper.set(limiter) - return limiter - - @classmethod - def open_signal_receiver( - cls, *signals: Signals - ) -> AbstractContextManager[AsyncIterator[Signals]]: - return _SignalReceiver(signals) - - @classmethod - def get_current_task(cls) -> TaskInfo: - task = current_task() - return TrioTaskInfo(task) - - @classmethod - def get_running_tasks(cls) -> Sequence[TaskInfo]: - root_task = current_root_task() - assert root_task - task_infos = [TrioTaskInfo(root_task)] - nurseries = root_task.child_nurseries - while nurseries: - new_nurseries: list[trio.Nursery] = [] - for nursery in nurseries: - for task in 
nursery.child_tasks: - task_infos.append(TrioTaskInfo(task)) - new_nurseries.extend(task.child_nurseries) - - nurseries = new_nurseries - - return task_infos - - @classmethod - async def wait_all_tasks_blocked(cls) -> None: - from trio.testing import wait_all_tasks_blocked - - await wait_all_tasks_blocked() - - @classmethod - def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: - return TestRunner(**options) - - -backend_class = TrioBackend diff --git a/myenv/Lib/site-packages/anyio/_core/__init__.py b/myenv/Lib/site-packages/anyio/_core/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/anyio/_core/_eventloop.py b/myenv/Lib/site-packages/anyio/_core/_eventloop.py deleted file mode 100644 index 6dcb458..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_eventloop.py +++ /dev/null @@ -1,166 +0,0 @@ -from __future__ import annotations - -import math -import sys -import threading -from collections.abc import Awaitable, Callable, Generator -from contextlib import contextmanager -from importlib import import_module -from typing import TYPE_CHECKING, Any, TypeVar - -import sniffio - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -if TYPE_CHECKING: - from ..abc import AsyncBackend - -# This must be updated when new backends are introduced -BACKENDS = "asyncio", "trio" - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") - -threadlocals = threading.local() -loaded_backends: dict[str, type[AsyncBackend]] = {} - - -def run( - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - *args: Unpack[PosArgsT], - backend: str = "asyncio", - backend_options: dict[str, Any] | None = None, -) -> T_Retval: - """ - Run the given coroutine function in an asynchronous event loop. - - The current thread must not be already running an event loop. - - :param func: a coroutine function - :param args: positional arguments to ``func`` - :param backend: name of the asynchronous event loop implementation – currently - either ``asyncio`` or ``trio`` - :param backend_options: keyword arguments to call the backend ``run()`` - implementation with (documented :ref:`here `) - :return: the return value of the coroutine function - :raises RuntimeError: if an asynchronous event loop is already running in this - thread - :raises LookupError: if the named backend is not found - - """ - try: - asynclib_name = sniffio.current_async_library() - except sniffio.AsyncLibraryNotFoundError: - pass - else: - raise RuntimeError(f"Already running {asynclib_name} in this thread") - - try: - async_backend = get_async_backend(backend) - except ImportError as exc: - raise LookupError(f"No such backend: {backend}") from exc - - token = None - if sniffio.current_async_library_cvar.get(None) is None: - # Since we're in control of the event loop, we can cache the name of the async - # library - token = sniffio.current_async_library_cvar.set(backend) - - try: - backend_options = backend_options or {} - return async_backend.run(func, args, {}, backend_options) - finally: - if token: - sniffio.current_async_library_cvar.reset(token) - - -async def sleep(delay: float) -> None: - """ - Pause the current task for the specified duration. - - :param delay: the duration, in seconds - - """ - return await get_async_backend().sleep(delay) - - -async def sleep_forever() -> None: - """ - Pause the current task until it's cancelled. - - This is a shortcut for ``sleep(math.inf)``. - - .. 
versionadded:: 3.1 - - """ - await sleep(math.inf) - - -async def sleep_until(deadline: float) -> None: - """ - Pause the current task until the given time. - - :param deadline: the absolute time to wake up at (according to the internal - monotonic clock of the event loop) - - .. versionadded:: 3.1 - - """ - now = current_time() - await sleep(max(deadline - now, 0)) - - -def current_time() -> float: - """ - Return the current value of the event loop's internal clock. - - :return: the clock value (seconds) - - """ - return get_async_backend().current_time() - - -def get_all_backends() -> tuple[str, ...]: - """Return a tuple of the names of all built-in backends.""" - return BACKENDS - - -def get_cancelled_exc_class() -> type[BaseException]: - """Return the current async library's cancellation exception class.""" - return get_async_backend().cancelled_exception_class() - - -# -# Private API -# - - -@contextmanager -def claim_worker_thread( - backend_class: type[AsyncBackend], token: object -) -> Generator[Any, None, None]: - threadlocals.current_async_backend = backend_class - threadlocals.current_token = token - try: - yield - finally: - del threadlocals.current_async_backend - del threadlocals.current_token - - -def get_async_backend(asynclib_name: str | None = None) -> type[AsyncBackend]: - if asynclib_name is None: - asynclib_name = sniffio.current_async_library() - - # We use our own dict instead of sys.modules to get the already imported back-end - # class because the appropriate modules in sys.modules could potentially be only - # partially initialized - try: - return loaded_backends[asynclib_name] - except KeyError: - module = import_module(f"anyio._backends._{asynclib_name}") - loaded_backends[asynclib_name] = module.backend_class - return module.backend_class diff --git a/myenv/Lib/site-packages/anyio/_core/_exceptions.py b/myenv/Lib/site-packages/anyio/_core/_exceptions.py deleted file mode 100644 index 6e3f8cc..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_exceptions.py +++ /dev/null @@ -1,89 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import Generator - -if sys.version_info < (3, 11): - from exceptiongroup import BaseExceptionGroup - - -class BrokenResourceError(Exception): - """ - Raised when trying to use a resource that has been rendered unusable due to external - causes (e.g. a send stream whose peer has disconnected). - """ - - -class BrokenWorkerProcess(Exception): - """ - Raised by :func:`run_sync_in_process` if the worker process terminates abruptly or - otherwise misbehaves. - """ - - -class BusyResourceError(Exception): - """ - Raised when two tasks are trying to read from or write to the same resource - concurrently. - """ - - def __init__(self, action: str): - super().__init__(f"Another task is already {action} this resource") - - -class ClosedResourceError(Exception): - """Raised when trying to use a resource that has been closed.""" - - -class DelimiterNotFound(Exception): - """ - Raised during - :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the - maximum number of bytes has been read without the delimiter being found. - """ - - def __init__(self, max_bytes: int) -> None: - super().__init__( - f"The delimiter was not found among the first {max_bytes} bytes" - ) - - -class EndOfStream(Exception): - """ - Raised when trying to read from a stream that has been closed from the other end. 
- """ - - -class IncompleteRead(Exception): - """ - Raised during - :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_exactly` or - :meth:`~anyio.streams.buffered.BufferedByteReceiveStream.receive_until` if the - connection is closed before the requested amount of bytes has been read. - """ - - def __init__(self) -> None: - super().__init__( - "The stream was closed before the read operation could be completed" - ) - - -class TypedAttributeLookupError(LookupError): - """ - Raised by :meth:`~anyio.TypedAttributeProvider.extra` when the given typed attribute - is not found and no default value has been given. - """ - - -class WouldBlock(Exception): - """Raised by ``X_nowait`` functions if ``X()`` would block.""" - - -def iterate_exceptions( - exception: BaseException, -) -> Generator[BaseException, None, None]: - if isinstance(exception, BaseExceptionGroup): - for exc in exception.exceptions: - yield from iterate_exceptions(exc) - else: - yield exception diff --git a/myenv/Lib/site-packages/anyio/_core/_fileio.py b/myenv/Lib/site-packages/anyio/_core/_fileio.py deleted file mode 100644 index 23ccb0d..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_fileio.py +++ /dev/null @@ -1,654 +0,0 @@ -from __future__ import annotations - -import os -import pathlib -import sys -from collections.abc import AsyncIterator, Callable, Iterable, Iterator, Sequence -from dataclasses import dataclass -from functools import partial -from os import PathLike -from typing import ( - IO, - TYPE_CHECKING, - Any, - AnyStr, - Final, - Generic, - overload, -) - -from .. import to_thread -from ..abc import AsyncResource - -if TYPE_CHECKING: - from _typeshed import OpenBinaryMode, OpenTextMode, ReadableBuffer, WriteableBuffer -else: - ReadableBuffer = OpenBinaryMode = OpenTextMode = WriteableBuffer = object - - -class AsyncFile(AsyncResource, Generic[AnyStr]): - """ - An asynchronous file object. - - This class wraps a standard file object and provides async friendly versions of the - following blocking methods (where available on the original file object): - - * read - * read1 - * readline - * readlines - * readinto - * readinto1 - * write - * writelines - * truncate - * seek - * tell - * flush - - All other methods are directly passed through. - - This class supports the asynchronous context manager protocol which closes the - underlying file at the end of the context block. - - This class also supports asynchronous iteration:: - - async with await open_file(...) 
as f: - async for line in f: - print(line) - """ - - def __init__(self, fp: IO[AnyStr]) -> None: - self._fp: Any = fp - - def __getattr__(self, name: str) -> object: - return getattr(self._fp, name) - - @property - def wrapped(self) -> IO[AnyStr]: - """The wrapped file object.""" - return self._fp - - async def __aiter__(self) -> AsyncIterator[AnyStr]: - while True: - line = await self.readline() - if line: - yield line - else: - break - - async def aclose(self) -> None: - return await to_thread.run_sync(self._fp.close) - - async def read(self, size: int = -1) -> AnyStr: - return await to_thread.run_sync(self._fp.read, size) - - async def read1(self: AsyncFile[bytes], size: int = -1) -> bytes: - return await to_thread.run_sync(self._fp.read1, size) - - async def readline(self) -> AnyStr: - return await to_thread.run_sync(self._fp.readline) - - async def readlines(self) -> list[AnyStr]: - return await to_thread.run_sync(self._fp.readlines) - - async def readinto(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: - return await to_thread.run_sync(self._fp.readinto, b) - - async def readinto1(self: AsyncFile[bytes], b: WriteableBuffer) -> bytes: - return await to_thread.run_sync(self._fp.readinto1, b) - - @overload - async def write(self: AsyncFile[bytes], b: ReadableBuffer) -> int: ... - - @overload - async def write(self: AsyncFile[str], b: str) -> int: ... - - async def write(self, b: ReadableBuffer | str) -> int: - return await to_thread.run_sync(self._fp.write, b) - - @overload - async def writelines( - self: AsyncFile[bytes], lines: Iterable[ReadableBuffer] - ) -> None: ... - - @overload - async def writelines(self: AsyncFile[str], lines: Iterable[str]) -> None: ... - - async def writelines(self, lines: Iterable[ReadableBuffer] | Iterable[str]) -> None: - return await to_thread.run_sync(self._fp.writelines, lines) - - async def truncate(self, size: int | None = None) -> int: - return await to_thread.run_sync(self._fp.truncate, size) - - async def seek(self, offset: int, whence: int | None = os.SEEK_SET) -> int: - return await to_thread.run_sync(self._fp.seek, offset, whence) - - async def tell(self) -> int: - return await to_thread.run_sync(self._fp.tell) - - async def flush(self) -> None: - return await to_thread.run_sync(self._fp.flush) - - -@overload -async def open_file( - file: str | PathLike[str] | int, - mode: OpenBinaryMode, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: Callable[[str, int], int] | None = ..., -) -> AsyncFile[bytes]: ... - - -@overload -async def open_file( - file: str | PathLike[str] | int, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - closefd: bool = ..., - opener: Callable[[str, int], int] | None = ..., -) -> AsyncFile[str]: ... - - -async def open_file( - file: str | PathLike[str] | int, - mode: str = "r", - buffering: int = -1, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - closefd: bool = True, - opener: Callable[[str, int], int] | None = None, -) -> AsyncFile[Any]: - """ - Open a file asynchronously. - - The arguments are exactly the same as for the builtin :func:`open`. 
- - :return: an asynchronous file object - - """ - fp = await to_thread.run_sync( - open, file, mode, buffering, encoding, errors, newline, closefd, opener - ) - return AsyncFile(fp) - - -def wrap_file(file: IO[AnyStr]) -> AsyncFile[AnyStr]: - """ - Wrap an existing file as an asynchronous file. - - :param file: an existing file-like object - :return: an asynchronous file object - - """ - return AsyncFile(file) - - -@dataclass(eq=False) -class _PathIterator(AsyncIterator["Path"]): - iterator: Iterator[PathLike[str]] - - async def __anext__(self) -> Path: - nextval = await to_thread.run_sync( - next, self.iterator, None, abandon_on_cancel=True - ) - if nextval is None: - raise StopAsyncIteration from None - - return Path(nextval) - - -class Path: - """ - An asynchronous version of :class:`pathlib.Path`. - - This class cannot be substituted for :class:`pathlib.Path` or - :class:`pathlib.PurePath`, but it is compatible with the :class:`os.PathLike` - interface. - - It implements the Python 3.10 version of :class:`pathlib.Path` interface, except for - the deprecated :meth:`~pathlib.Path.link_to` method. - - Any methods that do disk I/O need to be awaited on. These methods are: - - * :meth:`~pathlib.Path.absolute` - * :meth:`~pathlib.Path.chmod` - * :meth:`~pathlib.Path.cwd` - * :meth:`~pathlib.Path.exists` - * :meth:`~pathlib.Path.expanduser` - * :meth:`~pathlib.Path.group` - * :meth:`~pathlib.Path.hardlink_to` - * :meth:`~pathlib.Path.home` - * :meth:`~pathlib.Path.is_block_device` - * :meth:`~pathlib.Path.is_char_device` - * :meth:`~pathlib.Path.is_dir` - * :meth:`~pathlib.Path.is_fifo` - * :meth:`~pathlib.Path.is_file` - * :meth:`~pathlib.Path.is_mount` - * :meth:`~pathlib.Path.lchmod` - * :meth:`~pathlib.Path.lstat` - * :meth:`~pathlib.Path.mkdir` - * :meth:`~pathlib.Path.open` - * :meth:`~pathlib.Path.owner` - * :meth:`~pathlib.Path.read_bytes` - * :meth:`~pathlib.Path.read_text` - * :meth:`~pathlib.Path.readlink` - * :meth:`~pathlib.Path.rename` - * :meth:`~pathlib.Path.replace` - * :meth:`~pathlib.Path.rmdir` - * :meth:`~pathlib.Path.samefile` - * :meth:`~pathlib.Path.stat` - * :meth:`~pathlib.Path.touch` - * :meth:`~pathlib.Path.unlink` - * :meth:`~pathlib.Path.write_bytes` - * :meth:`~pathlib.Path.write_text` - - Additionally, the following methods return an async iterator yielding - :class:`~.Path` objects: - - * :meth:`~pathlib.Path.glob` - * :meth:`~pathlib.Path.iterdir` - * :meth:`~pathlib.Path.rglob` - """ - - __slots__ = "_path", "__weakref__" - - __weakref__: Any - - def __init__(self, *args: str | PathLike[str]) -> None: - self._path: Final[pathlib.Path] = pathlib.Path(*args) - - def __fspath__(self) -> str: - return self._path.__fspath__() - - def __str__(self) -> str: - return self._path.__str__() - - def __repr__(self) -> str: - return f"{self.__class__.__name__}({self.as_posix()!r})" - - def __bytes__(self) -> bytes: - return self._path.__bytes__() - - def __hash__(self) -> int: - return self._path.__hash__() - - def __eq__(self, other: object) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__eq__(target) - - def __lt__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__lt__(target) - - def __le__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__le__(target) - - def __gt__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else 
other - return self._path.__gt__(target) - - def __ge__(self, other: pathlib.PurePath | Path) -> bool: - target = other._path if isinstance(other, Path) else other - return self._path.__ge__(target) - - def __truediv__(self, other: str | PathLike[str]) -> Path: - return Path(self._path / other) - - def __rtruediv__(self, other: str | PathLike[str]) -> Path: - return Path(other) / self - - @property - def parts(self) -> tuple[str, ...]: - return self._path.parts - - @property - def drive(self) -> str: - return self._path.drive - - @property - def root(self) -> str: - return self._path.root - - @property - def anchor(self) -> str: - return self._path.anchor - - @property - def parents(self) -> Sequence[Path]: - return tuple(Path(p) for p in self._path.parents) - - @property - def parent(self) -> Path: - return Path(self._path.parent) - - @property - def name(self) -> str: - return self._path.name - - @property - def suffix(self) -> str: - return self._path.suffix - - @property - def suffixes(self) -> list[str]: - return self._path.suffixes - - @property - def stem(self) -> str: - return self._path.stem - - async def absolute(self) -> Path: - path = await to_thread.run_sync(self._path.absolute) - return Path(path) - - def as_posix(self) -> str: - return self._path.as_posix() - - def as_uri(self) -> str: - return self._path.as_uri() - - if sys.version_info >= (3, 13): - parser = pathlib.Path.parser - - @classmethod - def from_uri(cls, uri: str) -> Path: - return Path(pathlib.Path.from_uri(uri)) - - def full_match( - self, path_pattern: str, *, case_sensitive: bool | None = None - ) -> bool: - return self._path.full_match(path_pattern, case_sensitive=case_sensitive) - - def match( - self, path_pattern: str, *, case_sensitive: bool | None = None - ) -> bool: - return self._path.match(path_pattern, case_sensitive=case_sensitive) - else: - - def match(self, path_pattern: str) -> bool: - return self._path.match(path_pattern) - - def is_relative_to(self, other: str | PathLike[str]) -> bool: - try: - self.relative_to(other) - return True - except ValueError: - return False - - async def is_junction(self) -> bool: - return await to_thread.run_sync(self._path.is_junction) - - async def chmod(self, mode: int, *, follow_symlinks: bool = True) -> None: - func = partial(os.chmod, follow_symlinks=follow_symlinks) - return await to_thread.run_sync(func, self._path, mode) - - @classmethod - async def cwd(cls) -> Path: - path = await to_thread.run_sync(pathlib.Path.cwd) - return cls(path) - - async def exists(self) -> bool: - return await to_thread.run_sync(self._path.exists, abandon_on_cancel=True) - - async def expanduser(self) -> Path: - return Path( - await to_thread.run_sync(self._path.expanduser, abandon_on_cancel=True) - ) - - def glob(self, pattern: str) -> AsyncIterator[Path]: - gen = self._path.glob(pattern) - return _PathIterator(gen) - - async def group(self) -> str: - return await to_thread.run_sync(self._path.group, abandon_on_cancel=True) - - async def hardlink_to( - self, target: str | bytes | PathLike[str] | PathLike[bytes] - ) -> None: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(os.link, target, self) - - @classmethod - async def home(cls) -> Path: - home_path = await to_thread.run_sync(pathlib.Path.home) - return cls(home_path) - - def is_absolute(self) -> bool: - return self._path.is_absolute() - - async def is_block_device(self) -> bool: - return await to_thread.run_sync( - self._path.is_block_device, abandon_on_cancel=True - ) - - async def 
is_char_device(self) -> bool: - return await to_thread.run_sync( - self._path.is_char_device, abandon_on_cancel=True - ) - - async def is_dir(self) -> bool: - return await to_thread.run_sync(self._path.is_dir, abandon_on_cancel=True) - - async def is_fifo(self) -> bool: - return await to_thread.run_sync(self._path.is_fifo, abandon_on_cancel=True) - - async def is_file(self) -> bool: - return await to_thread.run_sync(self._path.is_file, abandon_on_cancel=True) - - async def is_mount(self) -> bool: - return await to_thread.run_sync( - os.path.ismount, self._path, abandon_on_cancel=True - ) - - def is_reserved(self) -> bool: - return self._path.is_reserved() - - async def is_socket(self) -> bool: - return await to_thread.run_sync(self._path.is_socket, abandon_on_cancel=True) - - async def is_symlink(self) -> bool: - return await to_thread.run_sync(self._path.is_symlink, abandon_on_cancel=True) - - def iterdir(self) -> AsyncIterator[Path]: - gen = self._path.iterdir() - return _PathIterator(gen) - - def joinpath(self, *args: str | PathLike[str]) -> Path: - return Path(self._path.joinpath(*args)) - - async def lchmod(self, mode: int) -> None: - await to_thread.run_sync(self._path.lchmod, mode) - - async def lstat(self) -> os.stat_result: - return await to_thread.run_sync(self._path.lstat, abandon_on_cancel=True) - - async def mkdir( - self, mode: int = 0o777, parents: bool = False, exist_ok: bool = False - ) -> None: - await to_thread.run_sync(self._path.mkdir, mode, parents, exist_ok) - - @overload - async def open( - self, - mode: OpenBinaryMode, - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - ) -> AsyncFile[bytes]: ... - - @overload - async def open( - self, - mode: OpenTextMode = ..., - buffering: int = ..., - encoding: str | None = ..., - errors: str | None = ..., - newline: str | None = ..., - ) -> AsyncFile[str]: ... 
- - async def open( - self, - mode: str = "r", - buffering: int = -1, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - ) -> AsyncFile[Any]: - fp = await to_thread.run_sync( - self._path.open, mode, buffering, encoding, errors, newline - ) - return AsyncFile(fp) - - async def owner(self) -> str: - return await to_thread.run_sync(self._path.owner, abandon_on_cancel=True) - - async def read_bytes(self) -> bytes: - return await to_thread.run_sync(self._path.read_bytes) - - async def read_text( - self, encoding: str | None = None, errors: str | None = None - ) -> str: - return await to_thread.run_sync(self._path.read_text, encoding, errors) - - if sys.version_info >= (3, 12): - - def relative_to( - self, *other: str | PathLike[str], walk_up: bool = False - ) -> Path: - return Path(self._path.relative_to(*other, walk_up=walk_up)) - - else: - - def relative_to(self, *other: str | PathLike[str]) -> Path: - return Path(self._path.relative_to(*other)) - - async def readlink(self) -> Path: - target = await to_thread.run_sync(os.readlink, self._path) - return Path(target) - - async def rename(self, target: str | pathlib.PurePath | Path) -> Path: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.rename, target) - return Path(target) - - async def replace(self, target: str | pathlib.PurePath | Path) -> Path: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.replace, target) - return Path(target) - - async def resolve(self, strict: bool = False) -> Path: - func = partial(self._path.resolve, strict=strict) - return Path(await to_thread.run_sync(func, abandon_on_cancel=True)) - - def rglob(self, pattern: str) -> AsyncIterator[Path]: - gen = self._path.rglob(pattern) - return _PathIterator(gen) - - async def rmdir(self) -> None: - await to_thread.run_sync(self._path.rmdir) - - async def samefile(self, other_path: str | PathLike[str]) -> bool: - if isinstance(other_path, Path): - other_path = other_path._path - - return await to_thread.run_sync( - self._path.samefile, other_path, abandon_on_cancel=True - ) - - async def stat(self, *, follow_symlinks: bool = True) -> os.stat_result: - func = partial(os.stat, follow_symlinks=follow_symlinks) - return await to_thread.run_sync(func, self._path, abandon_on_cancel=True) - - async def symlink_to( - self, - target: str | bytes | PathLike[str] | PathLike[bytes], - target_is_directory: bool = False, - ) -> None: - if isinstance(target, Path): - target = target._path - - await to_thread.run_sync(self._path.symlink_to, target, target_is_directory) - - async def touch(self, mode: int = 0o666, exist_ok: bool = True) -> None: - await to_thread.run_sync(self._path.touch, mode, exist_ok) - - async def unlink(self, missing_ok: bool = False) -> None: - try: - await to_thread.run_sync(self._path.unlink) - except FileNotFoundError: - if not missing_ok: - raise - - if sys.version_info >= (3, 12): - - async def walk( - self, - top_down: bool = True, - on_error: Callable[[OSError], object] | None = None, - follow_symlinks: bool = False, - ) -> AsyncIterator[tuple[Path, list[str], list[str]]]: - def get_next_value() -> tuple[pathlib.Path, list[str], list[str]] | None: - try: - return next(gen) - except StopIteration: - return None - - gen = self._path.walk(top_down, on_error, follow_symlinks) - while True: - value = await to_thread.run_sync(get_next_value) - if value is None: - return - - root, dirs, paths = value - yield Path(root), dirs, paths - - 
def with_name(self, name: str) -> Path: - return Path(self._path.with_name(name)) - - def with_stem(self, stem: str) -> Path: - return Path(self._path.with_name(stem + self._path.suffix)) - - def with_suffix(self, suffix: str) -> Path: - return Path(self._path.with_suffix(suffix)) - - def with_segments(self, *pathsegments: str | PathLike[str]) -> Path: - return Path(*pathsegments) - - async def write_bytes(self, data: bytes) -> int: - return await to_thread.run_sync(self._path.write_bytes, data) - - async def write_text( - self, - data: str, - encoding: str | None = None, - errors: str | None = None, - newline: str | None = None, - ) -> int: - # Path.write_text() does not support the "newline" parameter before Python 3.10 - def sync_write_text() -> int: - with self._path.open( - "w", encoding=encoding, errors=errors, newline=newline - ) as fp: - return fp.write(data) - - return await to_thread.run_sync(sync_write_text) - - -PathLike.register(Path) diff --git a/myenv/Lib/site-packages/anyio/_core/_resources.py b/myenv/Lib/site-packages/anyio/_core/_resources.py deleted file mode 100644 index b9a5344..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_resources.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -from ..abc import AsyncResource -from ._tasks import CancelScope - - -async def aclose_forcefully(resource: AsyncResource) -> None: - """ - Close an asynchronous resource in a cancelled scope. - - Doing this closes the resource without waiting on anything. - - :param resource: the resource to close - - """ - with CancelScope() as scope: - scope.cancel() - await resource.aclose() diff --git a/myenv/Lib/site-packages/anyio/_core/_signals.py b/myenv/Lib/site-packages/anyio/_core/_signals.py deleted file mode 100644 index f3451d3..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_signals.py +++ /dev/null @@ -1,27 +0,0 @@ -from __future__ import annotations - -from collections.abc import AsyncIterator -from contextlib import AbstractContextManager -from signal import Signals - -from ._eventloop import get_async_backend - - -def open_signal_receiver( - *signals: Signals, -) -> AbstractContextManager[AsyncIterator[Signals]]: - """ - Start receiving operating system signals. - - :param signals: signals to receive (e.g. ``signal.SIGINT``) - :return: an asynchronous context manager for an asynchronous iterator which yields - signal numbers - - .. warning:: Windows does not support signals natively so it is best to avoid - relying on this in cross-platform applications. - - .. warning:: On asyncio, this permanently replaces any previous signal handler for - the given signals, as set via :meth:`~asyncio.loop.add_signal_handler`. - - """ - return get_async_backend().open_signal_receiver(*signals) diff --git a/myenv/Lib/site-packages/anyio/_core/_sockets.py b/myenv/Lib/site-packages/anyio/_core/_sockets.py deleted file mode 100644 index 6070c64..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_sockets.py +++ /dev/null @@ -1,718 +0,0 @@ -from __future__ import annotations - -import errno -import os -import socket -import ssl -import stat -import sys -from collections.abc import Awaitable -from ipaddress import IPv6Address, ip_address -from os import PathLike, chmod -from socket import AddressFamily, SocketKind -from typing import Any, Literal, cast, overload - -from .. 
import to_thread -from ..abc import ( - ConnectedUDPSocket, - ConnectedUNIXDatagramSocket, - IPAddressType, - IPSockAddrType, - SocketListener, - SocketStream, - UDPSocket, - UNIXDatagramSocket, - UNIXSocketStream, -) -from ..streams.stapled import MultiListener -from ..streams.tls import TLSStream -from ._eventloop import get_async_backend -from ._resources import aclose_forcefully -from ._synchronization import Event -from ._tasks import create_task_group, move_on_after - -if sys.version_info < (3, 11): - from exceptiongroup import ExceptionGroup - -IPPROTO_IPV6 = getattr(socket, "IPPROTO_IPV6", 41) # https://bugs.python.org/issue29515 - -AnyIPAddressFamily = Literal[ - AddressFamily.AF_UNSPEC, AddressFamily.AF_INET, AddressFamily.AF_INET6 -] -IPAddressFamily = Literal[AddressFamily.AF_INET, AddressFamily.AF_INET6] - - -# tls_hostname given -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - ssl_context: ssl.SSLContext | None = ..., - tls_standard_compatible: bool = ..., - tls_hostname: str, - happy_eyeballs_delay: float = ..., -) -> TLSStream: ... - - -# ssl_context given -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - ssl_context: ssl.SSLContext, - tls_standard_compatible: bool = ..., - tls_hostname: str | None = ..., - happy_eyeballs_delay: float = ..., -) -> TLSStream: ... - - -# tls=True -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - tls: Literal[True], - ssl_context: ssl.SSLContext | None = ..., - tls_standard_compatible: bool = ..., - tls_hostname: str | None = ..., - happy_eyeballs_delay: float = ..., -) -> TLSStream: ... - - -# tls=False -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - tls: Literal[False], - ssl_context: ssl.SSLContext | None = ..., - tls_standard_compatible: bool = ..., - tls_hostname: str | None = ..., - happy_eyeballs_delay: float = ..., -) -> SocketStream: ... - - -# No TLS arguments -@overload -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = ..., - happy_eyeballs_delay: float = ..., -) -> SocketStream: ... - - -async def connect_tcp( - remote_host: IPAddressType, - remote_port: int, - *, - local_host: IPAddressType | None = None, - tls: bool = False, - ssl_context: ssl.SSLContext | None = None, - tls_standard_compatible: bool = True, - tls_hostname: str | None = None, - happy_eyeballs_delay: float = 0.25, -) -> SocketStream | TLSStream: - """ - Connect to a host using the TCP protocol. - - This function implements the stateless version of the Happy Eyeballs algorithm (RFC - 6555). If ``remote_host`` is a host name that resolves to multiple IP addresses, - each one is tried until one connection attempt succeeds. If the first attempt does - not connect within 250 milliseconds, a second attempt is started using the next - address in the list, and so on. On IPv6 enabled systems, an IPv6 address (if - available) is tried first. - - When the connection has been established, a TLS handshake will be done if either - ``ssl_context`` or ``tls_hostname`` is not ``None``, or if ``tls`` is ``True``. 
- - :param remote_host: the IP address or host name to connect to - :param remote_port: port on the target host to connect to - :param local_host: the interface address or name to bind the socket to before - connecting - :param tls: ``True`` to do a TLS handshake with the connected stream and return a - :class:`~anyio.streams.tls.TLSStream` instead - :param ssl_context: the SSL context object to use (if omitted, a default context is - created) - :param tls_standard_compatible: If ``True``, performs the TLS shutdown handshake - before closing the stream and requires that the server does this as well. - Otherwise, :exc:`~ssl.SSLEOFError` may be raised during reads from the stream. - Some protocols, such as HTTP, require this option to be ``False``. - See :meth:`~ssl.SSLContext.wrap_socket` for details. - :param tls_hostname: host name to check the server certificate against (defaults to - the value of ``remote_host``) - :param happy_eyeballs_delay: delay (in seconds) before starting the next connection - attempt - :return: a socket stream object if no TLS handshake was done, otherwise a TLS stream - :raises OSError: if the connection attempt fails - - """ - # Placed here due to https://github.com/python/mypy/issues/7057 - connected_stream: SocketStream | None = None - - async def try_connect(remote_host: str, event: Event) -> None: - nonlocal connected_stream - try: - stream = await asynclib.connect_tcp(remote_host, remote_port, local_address) - except OSError as exc: - oserrors.append(exc) - return - else: - if connected_stream is None: - connected_stream = stream - tg.cancel_scope.cancel() - else: - await stream.aclose() - finally: - event.set() - - asynclib = get_async_backend() - local_address: IPSockAddrType | None = None - family = socket.AF_UNSPEC - if local_host: - gai_res = await getaddrinfo(str(local_host), None) - family, *_, local_address = gai_res[0] - - target_host = str(remote_host) - try: - addr_obj = ip_address(remote_host) - except ValueError: - # getaddrinfo() will raise an exception if name resolution fails - gai_res = await getaddrinfo( - target_host, remote_port, family=family, type=socket.SOCK_STREAM - ) - - # Organize the list so that the first address is an IPv6 address (if available) - # and the second one is an IPv4 address. The rest can be in whatever order. 
- v6_found = v4_found = False - target_addrs: list[tuple[socket.AddressFamily, str]] = [] - for af, *rest, sa in gai_res: - if af == socket.AF_INET6 and not v6_found: - v6_found = True - target_addrs.insert(0, (af, sa[0])) - elif af == socket.AF_INET and not v4_found and v6_found: - v4_found = True - target_addrs.insert(1, (af, sa[0])) - else: - target_addrs.append((af, sa[0])) - else: - if isinstance(addr_obj, IPv6Address): - target_addrs = [(socket.AF_INET6, addr_obj.compressed)] - else: - target_addrs = [(socket.AF_INET, addr_obj.compressed)] - - oserrors: list[OSError] = [] - async with create_task_group() as tg: - for i, (af, addr) in enumerate(target_addrs): - event = Event() - tg.start_soon(try_connect, addr, event) - with move_on_after(happy_eyeballs_delay): - await event.wait() - - if connected_stream is None: - cause = ( - oserrors[0] - if len(oserrors) == 1 - else ExceptionGroup("multiple connection attempts failed", oserrors) - ) - raise OSError("All connection attempts failed") from cause - - if tls or tls_hostname or ssl_context: - try: - return await TLSStream.wrap( - connected_stream, - server_side=False, - hostname=tls_hostname or str(remote_host), - ssl_context=ssl_context, - standard_compatible=tls_standard_compatible, - ) - except BaseException: - await aclose_forcefully(connected_stream) - raise - - return connected_stream - - -async def connect_unix(path: str | bytes | PathLike[Any]) -> UNIXSocketStream: - """ - Connect to the given UNIX socket. - - Not available on Windows. - - :param path: path to the socket - :return: a socket stream object - - """ - path = os.fspath(path) - return await get_async_backend().connect_unix(path) - - -async def create_tcp_listener( - *, - local_host: IPAddressType | None = None, - local_port: int = 0, - family: AnyIPAddressFamily = socket.AddressFamily.AF_UNSPEC, - backlog: int = 65536, - reuse_port: bool = False, -) -> MultiListener[SocketStream]: - """ - Create a TCP socket listener. - - :param local_port: port number to listen on - :param local_host: IP address of the interface to listen on. If omitted, listen on - all IPv4 and IPv6 interfaces. To listen on all interfaces on a specific address - family, use ``0.0.0.0`` for IPv4 or ``::`` for IPv6. 
- :param family: address family (used if ``local_host`` was omitted) - :param backlog: maximum number of queued incoming connections (up to a maximum of - 2**16, or 65536) - :param reuse_port: ``True`` to allow multiple sockets to bind to the same - address/port (not supported on Windows) - :return: a list of listener objects - - """ - asynclib = get_async_backend() - backlog = min(backlog, 65536) - local_host = str(local_host) if local_host is not None else None - gai_res = await getaddrinfo( - local_host, - local_port, - family=family, - type=socket.SocketKind.SOCK_STREAM if sys.platform == "win32" else 0, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - listeners: list[SocketListener] = [] - try: - # The set() is here to work around a glibc bug: - # https://sourceware.org/bugzilla/show_bug.cgi?id=14969 - sockaddr: tuple[str, int] | tuple[str, int, int, int] - for fam, kind, *_, sockaddr in sorted(set(gai_res)): - # Workaround for an uvloop bug where we don't get the correct scope ID for - # IPv6 link-local addresses when passing type=socket.SOCK_STREAM to - # getaddrinfo(): https://github.com/MagicStack/uvloop/issues/539 - if sys.platform != "win32" and kind is not SocketKind.SOCK_STREAM: - continue - - raw_socket = socket.socket(fam) - raw_socket.setblocking(False) - - # For Windows, enable exclusive address use. For others, enable address - # reuse. - if sys.platform == "win32": - raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_EXCLUSIVEADDRUSE, 1) - else: - raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - - if reuse_port: - raw_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) - - # If only IPv6 was requested, disable dual stack operation - if fam == socket.AF_INET6: - raw_socket.setsockopt(IPPROTO_IPV6, socket.IPV6_V6ONLY, 1) - - # Workaround for #554 - if "%" in sockaddr[0]: - addr, scope_id = sockaddr[0].split("%", 1) - sockaddr = (addr, sockaddr[1], 0, int(scope_id)) - - raw_socket.bind(sockaddr) - raw_socket.listen(backlog) - listener = asynclib.create_tcp_listener(raw_socket) - listeners.append(listener) - except BaseException: - for listener in listeners: - await listener.aclose() - - raise - - return MultiListener(listeners) - - -async def create_unix_listener( - path: str | bytes | PathLike[Any], - *, - mode: int | None = None, - backlog: int = 65536, -) -> SocketListener: - """ - Create a UNIX socket listener. - - Not available on Windows. - - :param path: path of the socket - :param mode: permissions to set on the socket - :param backlog: maximum number of queued incoming connections (up to a maximum of - 2**16, or 65536) - :return: a listener object - - .. versionchanged:: 3.0 - If a socket already exists on the file system in the given path, it will be - removed first. - - """ - backlog = min(backlog, 65536) - raw_socket = await setup_unix_local_socket(path, mode, socket.SOCK_STREAM) - try: - raw_socket.listen(backlog) - return get_async_backend().create_unix_listener(raw_socket) - except BaseException: - raw_socket.close() - raise - - -async def create_udp_socket( - family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, - *, - local_host: IPAddressType | None = None, - local_port: int = 0, - reuse_port: bool = False, -) -> UDPSocket: - """ - Create a UDP socket. - - If ``local_port`` has been given, the socket will be bound to this port on the local - machine, making this socket suitable for providing UDP based services. 
- - :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically - determined from ``local_host`` if omitted - :param local_host: IP address or host name of the local interface to bind to - :param local_port: local port to bind to - :param reuse_port: ``True`` to allow multiple sockets to bind to the same - address/port (not supported on Windows) - :return: a UDP socket - - """ - if family is AddressFamily.AF_UNSPEC and not local_host: - raise ValueError('Either "family" or "local_host" must be given') - - if local_host: - gai_res = await getaddrinfo( - str(local_host), - local_port, - family=family, - type=socket.SOCK_DGRAM, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - local_address = gai_res[0][-1] - elif family is AddressFamily.AF_INET6: - local_address = ("::", 0) - else: - local_address = ("0.0.0.0", 0) - - sock = await get_async_backend().create_udp_socket( - family, local_address, None, reuse_port - ) - return cast(UDPSocket, sock) - - -async def create_connected_udp_socket( - remote_host: IPAddressType, - remote_port: int, - *, - family: AnyIPAddressFamily = AddressFamily.AF_UNSPEC, - local_host: IPAddressType | None = None, - local_port: int = 0, - reuse_port: bool = False, -) -> ConnectedUDPSocket: - """ - Create a connected UDP socket. - - Connected UDP sockets can only communicate with the specified remote host/port, and - any packets sent from other sources are dropped. - - :param remote_host: remote host to set as the default target - :param remote_port: port on the remote host to set as the default target - :param family: address family (``AF_INET`` or ``AF_INET6``) – automatically - determined from ``local_host`` or ``remote_host`` if omitted - :param local_host: IP address or host name of the local interface to bind to - :param local_port: local port to bind to - :param reuse_port: ``True`` to allow multiple sockets to bind to the same - address/port (not supported on Windows) - :return: a connected UDP socket - - """ - local_address = None - if local_host: - gai_res = await getaddrinfo( - str(local_host), - local_port, - family=family, - type=socket.SOCK_DGRAM, - flags=socket.AI_PASSIVE | socket.AI_ADDRCONFIG, - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - local_address = gai_res[0][-1] - - gai_res = await getaddrinfo( - str(remote_host), remote_port, family=family, type=socket.SOCK_DGRAM - ) - family = cast(AnyIPAddressFamily, gai_res[0][0]) - remote_address = gai_res[0][-1] - - sock = await get_async_backend().create_udp_socket( - family, local_address, remote_address, reuse_port - ) - return cast(ConnectedUDPSocket, sock) - - -async def create_unix_datagram_socket( - *, - local_path: None | str | bytes | PathLike[Any] = None, - local_mode: int | None = None, -) -> UNIXDatagramSocket: - """ - Create a UNIX datagram socket. - - Not available on Windows. - - If ``local_path`` has been given, the socket will be bound to this path, making this - socket suitable for receiving datagrams from other processes. Other processes can - send datagrams to this socket only if ``local_path`` is set. - - If a socket already exists on the file system in the ``local_path``, it will be - removed first. 
- - :param local_path: the path on which to bind to - :param local_mode: permissions to set on the local socket - :return: a UNIX datagram socket - - """ - raw_socket = await setup_unix_local_socket( - local_path, local_mode, socket.SOCK_DGRAM - ) - return await get_async_backend().create_unix_datagram_socket(raw_socket, None) - - -async def create_connected_unix_datagram_socket( - remote_path: str | bytes | PathLike[Any], - *, - local_path: None | str | bytes | PathLike[Any] = None, - local_mode: int | None = None, -) -> ConnectedUNIXDatagramSocket: - """ - Create a connected UNIX datagram socket. - - Connected datagram sockets can only communicate with the specified remote path. - - If ``local_path`` has been given, the socket will be bound to this path, making - this socket suitable for receiving datagrams from other processes. Other processes - can send datagrams to this socket only if ``local_path`` is set. - - If a socket already exists on the file system in the ``local_path``, it will be - removed first. - - :param remote_path: the path to set as the default target - :param local_path: the path on which to bind to - :param local_mode: permissions to set on the local socket - :return: a connected UNIX datagram socket - - """ - remote_path = os.fspath(remote_path) - raw_socket = await setup_unix_local_socket( - local_path, local_mode, socket.SOCK_DGRAM - ) - return await get_async_backend().create_unix_datagram_socket( - raw_socket, remote_path - ) - - -async def getaddrinfo( - host: bytes | str | None, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, -) -> list[tuple[AddressFamily, SocketKind, int, str, tuple[str, int]]]: - """ - Look up a numeric IP address given a host name. - - Internationalized domain names are translated according to the (non-transitional) - IDNA 2008 standard. - - .. note:: 4-tuple IPv6 socket addresses are automatically converted to 2-tuples of - (host, port), unlike what :func:`socket.getaddrinfo` does. - - :param host: host name - :param port: port number - :param family: socket family (``AF_INET``, ...) - :param type: socket type (``SOCK_STREAM``, ...) - :param proto: protocol number - :param flags: flags to pass to upstream ``getaddrinfo()`` - :return: list of tuples containing (family, type, proto, canonname, sockaddr) - - .. seealso:: :func:`socket.getaddrinfo` - - """ - # Handle unicode hostnames - if isinstance(host, str): - try: - encoded_host: bytes | None = host.encode("ascii") - except UnicodeEncodeError: - import idna - - encoded_host = idna.encode(host, uts46=True) - else: - encoded_host = host - - gai_res = await get_async_backend().getaddrinfo( - encoded_host, port, family=family, type=type, proto=proto, flags=flags - ) - return [ - (family, type, proto, canonname, convert_ipv6_sockaddr(sockaddr)) - for family, type, proto, canonname, sockaddr in gai_res - ] - - -def getnameinfo(sockaddr: IPSockAddrType, flags: int = 0) -> Awaitable[tuple[str, str]]: - """ - Look up the host name of an IP address. - - :param sockaddr: socket address (e.g. (ipaddress, port) for IPv4) - :param flags: flags to pass to upstream ``getnameinfo()`` - :return: a tuple of (host name, service name) - - .. seealso:: :func:`socket.getnameinfo` - - """ - return get_async_backend().getnameinfo(sockaddr, flags) - - -def wait_socket_readable(sock: socket.socket) -> Awaitable[None]: - """ - Wait until the given socket has data to be read. 
- - This does **NOT** work on Windows when using the asyncio backend with a proactor - event loop (default on py3.8+). - - .. warning:: Only use this on raw sockets that have not been wrapped by any higher - level constructs like socket streams! - - :param sock: a socket object - :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the - socket to become readable - :raises ~anyio.BusyResourceError: if another task is already waiting for the socket - to become readable - - """ - return get_async_backend().wait_socket_readable(sock) - - -def wait_socket_writable(sock: socket.socket) -> Awaitable[None]: - """ - Wait until the given socket can be written to. - - This does **NOT** work on Windows when using the asyncio backend with a proactor - event loop (default on py3.8+). - - .. warning:: Only use this on raw sockets that have not been wrapped by any higher - level constructs like socket streams! - - :param sock: a socket object - :raises ~anyio.ClosedResourceError: if the socket was closed while waiting for the - socket to become writable - :raises ~anyio.BusyResourceError: if another task is already waiting for the socket - to become writable - - """ - return get_async_backend().wait_socket_writable(sock) - - -# -# Private API -# - - -def convert_ipv6_sockaddr( - sockaddr: tuple[str, int, int, int] | tuple[str, int], -) -> tuple[str, int]: - """ - Convert a 4-tuple IPv6 socket address to a 2-tuple (address, port) format. - - If the scope ID is nonzero, it is added to the address, separated with ``%``. - Otherwise the flow id and scope id are simply cut off from the tuple. - Any other kinds of socket addresses are returned as-is. - - :param sockaddr: the result of :meth:`~socket.socket.getsockname` - :return: the converted socket address - - """ - # This is more complicated than it should be because of MyPy - if isinstance(sockaddr, tuple) and len(sockaddr) == 4: - host, port, flowinfo, scope_id = sockaddr - if scope_id: - # PyPy (as of v7.3.11) leaves the interface name in the result, so - # we discard it and only get the scope ID from the end - # (https://foss.heptapod.net/pypy/pypy/-/issues/3938) - host = host.split("%")[0] - - # Add scope_id to the address - return f"{host}%{scope_id}", port - else: - return host, port - else: - return sockaddr - - -async def setup_unix_local_socket( - path: None | str | bytes | PathLike[Any], - mode: int | None, - socktype: int, -) -> socket.socket: - """ - Create a UNIX local socket object, deleting the socket at the given path if it - exists. - - Not available on Windows. - - :param path: path of the socket - :param mode: permissions to set on the socket - :param socktype: socket.SOCK_STREAM or socket.SOCK_DGRAM - - """ - path_str: str | None - if path is not None: - path_str = os.fsdecode(path) - - # Linux abstract namespace sockets aren't backed by a concrete file so skip stat call - if not path_str.startswith("\0"): - # Copied from pathlib... 
- try: - stat_result = os.stat(path) - except OSError as e: - if e.errno not in ( - errno.ENOENT, - errno.ENOTDIR, - errno.EBADF, - errno.ELOOP, - ): - raise - else: - if stat.S_ISSOCK(stat_result.st_mode): - os.unlink(path) - else: - path_str = None - - raw_socket = socket.socket(socket.AF_UNIX, socktype) - raw_socket.setblocking(False) - - if path_str is not None: - try: - await to_thread.run_sync(raw_socket.bind, path_str, abandon_on_cancel=True) - if mode is not None: - await to_thread.run_sync(chmod, path_str, mode, abandon_on_cancel=True) - except BaseException: - raw_socket.close() - raise - - return raw_socket diff --git a/myenv/Lib/site-packages/anyio/_core/_streams.py b/myenv/Lib/site-packages/anyio/_core/_streams.py deleted file mode 100644 index 6a9814e..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_streams.py +++ /dev/null @@ -1,52 +0,0 @@ -from __future__ import annotations - -import math -from typing import TypeVar -from warnings import warn - -from ..streams.memory import ( - MemoryObjectReceiveStream, - MemoryObjectSendStream, - MemoryObjectStreamState, -) - -T_Item = TypeVar("T_Item") - - -class create_memory_object_stream( - tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]], -): - """ - Create a memory object stream. - - The stream's item type can be annotated like - :func:`create_memory_object_stream[T_Item]`. - - :param max_buffer_size: number of items held in the buffer until ``send()`` starts - blocking - :param item_type: old way of marking the streams with the right generic type for - static typing (does nothing on AnyIO 4) - - .. deprecated:: 4.0 - Use ``create_memory_object_stream[YourItemType](...)`` instead. - :return: a tuple of (send stream, receive stream) - - """ - - def __new__( # type: ignore[misc] - cls, max_buffer_size: float = 0, item_type: object = None - ) -> tuple[MemoryObjectSendStream[T_Item], MemoryObjectReceiveStream[T_Item]]: - if max_buffer_size != math.inf and not isinstance(max_buffer_size, int): - raise ValueError("max_buffer_size must be either an integer or math.inf") - if max_buffer_size < 0: - raise ValueError("max_buffer_size cannot be negative") - if item_type is not None: - warn( - "The item_type argument has been deprecated in AnyIO 4.0. " - "Use create_memory_object_stream[YourItemType](...) 
instead.", - DeprecationWarning, - stacklevel=2, - ) - - state = MemoryObjectStreamState[T_Item](max_buffer_size) - return (MemoryObjectSendStream(state), MemoryObjectReceiveStream(state)) diff --git a/myenv/Lib/site-packages/anyio/_core/_subprocesses.py b/myenv/Lib/site-packages/anyio/_core/_subprocesses.py deleted file mode 100644 index 7ba41a5..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_subprocesses.py +++ /dev/null @@ -1,196 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import AsyncIterable, Iterable, Mapping, Sequence -from io import BytesIO -from os import PathLike -from subprocess import DEVNULL, PIPE, CalledProcessError, CompletedProcess -from typing import IO, Any, Union, cast - -from ..abc import Process -from ._eventloop import get_async_backend -from ._tasks import create_task_group - -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - -StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] - - -async def run_process( - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - input: bytes | None = None, - stdout: int | IO[Any] | None = PIPE, - stderr: int | IO[Any] | None = PIPE, - check: bool = True, - cwd: StrOrBytesPath | None = None, - env: Mapping[str, str] | None = None, - startupinfo: Any = None, - creationflags: int = 0, - start_new_session: bool = False, - pass_fds: Sequence[int] = (), - user: str | int | None = None, - group: str | int | None = None, - extra_groups: Iterable[str | int] | None = None, - umask: int = -1, -) -> CompletedProcess[bytes]: - """ - Run an external command in a subprocess and wait until it completes. - - .. seealso:: :func:`subprocess.run` - - :param command: either a string to pass to the shell, or an iterable of strings - containing the executable name or path and its arguments - :param input: bytes passed to the standard input of the subprocess - :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - a file-like object, or `None` - :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - :data:`subprocess.STDOUT`, a file-like object, or `None` - :param check: if ``True``, raise :exc:`~subprocess.CalledProcessError` if the - process terminates with a return code other than 0 - :param cwd: If not ``None``, change the working directory to this before running the - command - :param env: if not ``None``, this mapping replaces the inherited environment - variables from the parent process - :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used - to specify process startup parameters (Windows only) - :param creationflags: flags that can be used to control the creation of the - subprocess (see :class:`subprocess.Popen` for the specifics) - :param start_new_session: if ``true`` the setsid() system call will be made in the - child process prior to the execution of the subprocess. (POSIX only) - :param pass_fds: sequence of file descriptors to keep open between the parent and - child processes. 
(POSIX only) - :param user: effective user to run the process as (Python >= 3.9, POSIX only) - :param group: effective group to run the process as (Python >= 3.9, POSIX only) - :param extra_groups: supplementary groups to set in the subprocess (Python >= 3.9, - POSIX only) - :param umask: if not negative, this umask is applied in the child process before - running the given command (Python >= 3.9, POSIX only) - :return: an object representing the completed process - :raises ~subprocess.CalledProcessError: if ``check`` is ``True`` and the process - exits with a nonzero return code - - """ - - async def drain_stream(stream: AsyncIterable[bytes], index: int) -> None: - buffer = BytesIO() - async for chunk in stream: - buffer.write(chunk) - - stream_contents[index] = buffer.getvalue() - - async with await open_process( - command, - stdin=PIPE if input else DEVNULL, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - startupinfo=startupinfo, - creationflags=creationflags, - start_new_session=start_new_session, - pass_fds=pass_fds, - user=user, - group=group, - extra_groups=extra_groups, - umask=umask, - ) as process: - stream_contents: list[bytes | None] = [None, None] - async with create_task_group() as tg: - if process.stdout: - tg.start_soon(drain_stream, process.stdout, 0) - - if process.stderr: - tg.start_soon(drain_stream, process.stderr, 1) - - if process.stdin and input: - await process.stdin.send(input) - await process.stdin.aclose() - - await process.wait() - - output, errors = stream_contents - if check and process.returncode != 0: - raise CalledProcessError(cast(int, process.returncode), command, output, errors) - - return CompletedProcess(command, cast(int, process.returncode), output, errors) - - -async def open_process( - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None = PIPE, - stdout: int | IO[Any] | None = PIPE, - stderr: int | IO[Any] | None = PIPE, - cwd: StrOrBytesPath | None = None, - env: Mapping[str, str] | None = None, - startupinfo: Any = None, - creationflags: int = 0, - start_new_session: bool = False, - pass_fds: Sequence[int] = (), - user: str | int | None = None, - group: str | int | None = None, - extra_groups: Iterable[str | int] | None = None, - umask: int = -1, -) -> Process: - """ - Start an external command in a subprocess. - - .. seealso:: :class:`subprocess.Popen` - - :param command: either a string to pass to the shell, or an iterable of strings - containing the executable name or path and its arguments - :param stdin: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, a - file-like object, or ``None`` - :param stdout: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - a file-like object, or ``None`` - :param stderr: one of :data:`subprocess.PIPE`, :data:`subprocess.DEVNULL`, - :data:`subprocess.STDOUT`, a file-like object, or ``None`` - :param cwd: If not ``None``, the working directory is changed before executing - :param env: If env is not ``None``, it must be a mapping that defines the - environment variables for the new process - :param creationflags: flags that can be used to control the creation of the - subprocess (see :class:`subprocess.Popen` for the specifics) - :param startupinfo: an instance of :class:`subprocess.STARTUPINFO` that can be used - to specify process startup parameters (Windows only) - :param start_new_session: if ``true`` the setsid() system call will be made in the - child process prior to the execution of the subprocess. 
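The run_process() helper deleted above mirrors subprocess.run(). A minimal sketch of calling it (the command is illustrative; passing a list rather than a string keeps the shell out of the picture):

import sys
import anyio

async def main() -> None:
    # check=True (the default) raises CalledProcessError on a nonzero exit code
    result = await anyio.run_process([sys.executable, "-c", "print('hello')"])
    print(result.returncode, result.stdout.decode())

anyio.run(main)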
(POSIX only) - :param pass_fds: sequence of file descriptors to keep open between the parent and - child processes. (POSIX only) - :param user: effective user to run the process as (POSIX only) - :param group: effective group to run the process as (POSIX only) - :param extra_groups: supplementary groups to set in the subprocess (POSIX only) - :param umask: if not negative, this umask is applied in the child process before - running the given command (POSIX only) - :return: an asynchronous process object - - """ - kwargs: dict[str, Any] = {} - if user is not None: - kwargs["user"] = user - - if group is not None: - kwargs["group"] = group - - if extra_groups is not None: - kwargs["extra_groups"] = extra_groups - - if umask >= 0: - kwargs["umask"] = umask - - return await get_async_backend().open_process( - command, - stdin=stdin, - stdout=stdout, - stderr=stderr, - cwd=cwd, - env=env, - startupinfo=startupinfo, - creationflags=creationflags, - start_new_session=start_new_session, - pass_fds=pass_fds, - **kwargs, - ) diff --git a/myenv/Lib/site-packages/anyio/_core/_synchronization.py b/myenv/Lib/site-packages/anyio/_core/_synchronization.py deleted file mode 100644 index 023ab73..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_synchronization.py +++ /dev/null @@ -1,724 +0,0 @@ -from __future__ import annotations - -import math -from collections import deque -from dataclasses import dataclass -from types import TracebackType - -from sniffio import AsyncLibraryNotFoundError - -from ..lowlevel import checkpoint -from ._eventloop import get_async_backend -from ._exceptions import BusyResourceError -from ._tasks import CancelScope -from ._testing import TaskInfo, get_current_task - - -@dataclass(frozen=True) -class EventStatistics: - """ - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Event.wait` - """ - - tasks_waiting: int - - -@dataclass(frozen=True) -class CapacityLimiterStatistics: - """ - :ivar int borrowed_tokens: number of tokens currently borrowed by tasks - :ivar float total_tokens: total number of available tokens - :ivar tuple borrowers: tasks or other objects currently holding tokens borrowed from - this limiter - :ivar int tasks_waiting: number of tasks waiting on - :meth:`~.CapacityLimiter.acquire` or - :meth:`~.CapacityLimiter.acquire_on_behalf_of` - """ - - borrowed_tokens: int - total_tokens: float - borrowers: tuple[object, ...]
- tasks_waiting: int - - -@dataclass(frozen=True) -class LockStatistics: - """ - :ivar bool locked: flag indicating if this lock is locked or not - :ivar ~anyio.TaskInfo owner: task currently holding the lock (or ``None`` if the - lock is not held by any task) - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Lock.acquire` - """ - - locked: bool - owner: TaskInfo | None - tasks_waiting: int - - -@dataclass(frozen=True) -class ConditionStatistics: - """ - :ivar int tasks_waiting: number of tasks blocked on :meth:`~.Condition.wait` - :ivar ~anyio.LockStatistics lock_statistics: statistics of the underlying - :class:`~.Lock` - """ - - tasks_waiting: int - lock_statistics: LockStatistics - - -@dataclass(frozen=True) -class SemaphoreStatistics: - """ - :ivar int tasks_waiting: number of tasks waiting on :meth:`~.Semaphore.acquire` - - """ - - tasks_waiting: int - - -class Event: - def __new__(cls) -> Event: - try: - return get_async_backend().create_event() - except AsyncLibraryNotFoundError: - return EventAdapter() - - def set(self) -> None: - """Set the flag, notifying all listeners.""" - raise NotImplementedError - - def is_set(self) -> bool: - """Return ``True`` if the flag is set, ``False`` if not.""" - raise NotImplementedError - - async def wait(self) -> None: - """ - Wait until the flag has been set. - - If the flag has already been set when this method is called, it returns - immediately. - - """ - raise NotImplementedError - - def statistics(self) -> EventStatistics: - """Return statistics about the current state of this event.""" - raise NotImplementedError - - -class EventAdapter(Event): - _internal_event: Event | None = None - - def __new__(cls) -> EventAdapter: - return object.__new__(cls) - - @property - def _event(self) -> Event: - if self._internal_event is None: - self._internal_event = get_async_backend().create_event() - - return self._internal_event - - def set(self) -> None: - self._event.set() - - def is_set(self) -> bool: - return self._internal_event is not None and self._internal_event.is_set() - - async def wait(self) -> None: - await self._event.wait() - - def statistics(self) -> EventStatistics: - if self._internal_event is None: - return EventStatistics(tasks_waiting=0) - - return self._internal_event.statistics() - - -class Lock: - def __new__(cls, *, fast_acquire: bool = False) -> Lock: - try: - return get_async_backend().create_lock(fast_acquire=fast_acquire) - except AsyncLibraryNotFoundError: - return LockAdapter(fast_acquire=fast_acquire) - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - async def acquire(self) -> None: - """Acquire the lock.""" - raise NotImplementedError - - def acquire_nowait(self) -> None: - """ - Acquire the lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - raise NotImplementedError - - def release(self) -> None: - """Release the lock.""" - raise NotImplementedError - - def locked(self) -> bool: - """Return True if the lock is currently held.""" - raise NotImplementedError - - def statistics(self) -> LockStatistics: - """ - Return statistics about the current state of this lock. - - .. 
versionadded:: 3.0 - """ - raise NotImplementedError - - -class LockAdapter(Lock): - _internal_lock: Lock | None = None - - def __new__(cls, *, fast_acquire: bool = False) -> LockAdapter: - return object.__new__(cls) - - def __init__(self, *, fast_acquire: bool = False): - self._fast_acquire = fast_acquire - - @property - def _lock(self) -> Lock: - if self._internal_lock is None: - self._internal_lock = get_async_backend().create_lock( - fast_acquire=self._fast_acquire - ) - - return self._internal_lock - - async def __aenter__(self) -> None: - await self._lock.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - if self._internal_lock is not None: - self._internal_lock.release() - - async def acquire(self) -> None: - """Acquire the lock.""" - await self._lock.acquire() - - def acquire_nowait(self) -> None: - """ - Acquire the lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - self._lock.acquire_nowait() - - def release(self) -> None: - """Release the lock.""" - self._lock.release() - - def locked(self) -> bool: - """Return True if the lock is currently held.""" - return self._lock.locked() - - def statistics(self) -> LockStatistics: - """ - Return statistics about the current state of this lock. - - .. versionadded:: 3.0 - - """ - if self._internal_lock is None: - return LockStatistics(False, None, 0) - - return self._internal_lock.statistics() - - -class Condition: - _owner_task: TaskInfo | None = None - - def __init__(self, lock: Lock | None = None): - self._lock = lock or Lock() - self._waiters: deque[Event] = deque() - - async def __aenter__(self) -> None: - await self.acquire() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - def _check_acquired(self) -> None: - if self._owner_task != get_current_task(): - raise RuntimeError("The current task is not holding the underlying lock") - - async def acquire(self) -> None: - """Acquire the underlying lock.""" - await self._lock.acquire() - self._owner_task = get_current_task() - - def acquire_nowait(self) -> None: - """ - Acquire the underlying lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - self._lock.acquire_nowait() - self._owner_task = get_current_task() - - def release(self) -> None: - """Release the underlying lock.""" - self._lock.release() - - def locked(self) -> bool: - """Return True if the lock is set.""" - return self._lock.locked() - - def notify(self, n: int = 1) -> None: - """Notify exactly n listeners.""" - self._check_acquired() - for _ in range(n): - try: - event = self._waiters.popleft() - except IndexError: - break - - event.set() - - def notify_all(self) -> None: - """Notify all the listeners.""" - self._check_acquired() - for event in self._waiters: - event.set() - - self._waiters.clear() - - async def wait(self) -> None: - """Wait for a notification.""" - await checkpoint() - event = Event() - self._waiters.append(event) - self.release() - try: - await event.wait() - except BaseException: - if not event.is_set(): - self._waiters.remove(event) - - raise - finally: - with CancelScope(shield=True): - await self.acquire() - - def statistics(self) -> ConditionStatistics: - """ - Return statistics about the current state of this condition. - - .. 
versionadded:: 3.0 - """ - return ConditionStatistics(len(self._waiters), self._lock.statistics()) - - -class Semaphore: - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - try: - return get_async_backend().create_semaphore( - initial_value, max_value=max_value, fast_acquire=fast_acquire - ) - except AsyncLibraryNotFoundError: - return SemaphoreAdapter(initial_value, max_value=max_value) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ): - if not isinstance(initial_value, int): - raise TypeError("initial_value must be an integer") - if initial_value < 0: - raise ValueError("initial_value must be >= 0") - if max_value is not None: - if not isinstance(max_value, int): - raise TypeError("max_value must be an integer or None") - if max_value < initial_value: - raise ValueError( - "max_value must be equal to or higher than initial_value" - ) - - self._fast_acquire = fast_acquire - - async def __aenter__(self) -> Semaphore: - await self.acquire() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.release() - - async def acquire(self) -> None: - """Decrement the semaphore value, blocking if necessary.""" - raise NotImplementedError - - def acquire_nowait(self) -> None: - """ - Acquire the underlying lock, without blocking. - - :raises ~anyio.WouldBlock: if the operation would block - - """ - raise NotImplementedError - - def release(self) -> None: - """Increment the semaphore value.""" - raise NotImplementedError - - @property - def value(self) -> int: - """The current value of the semaphore.""" - raise NotImplementedError - - @property - def max_value(self) -> int | None: - """The maximum value of the semaphore.""" - raise NotImplementedError - - def statistics(self) -> SemaphoreStatistics: - """ - Return statistics about the current state of this semaphore. - - .. 
versionadded:: 3.0 - """ - raise NotImplementedError - - -class SemaphoreAdapter(Semaphore): - _internal_semaphore: Semaphore | None = None - - def __new__( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> SemaphoreAdapter: - return object.__new__(cls) - - def __init__( - self, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> None: - super().__init__(initial_value, max_value=max_value, fast_acquire=fast_acquire) - self._initial_value = initial_value - self._max_value = max_value - - @property - def _semaphore(self) -> Semaphore: - if self._internal_semaphore is None: - self._internal_semaphore = get_async_backend().create_semaphore( - self._initial_value, max_value=self._max_value - ) - - return self._internal_semaphore - - async def acquire(self) -> None: - await self._semaphore.acquire() - - def acquire_nowait(self) -> None: - self._semaphore.acquire_nowait() - - def release(self) -> None: - self._semaphore.release() - - @property - def value(self) -> int: - if self._internal_semaphore is None: - return self._initial_value - - return self._semaphore.value - - @property - def max_value(self) -> int | None: - return self._max_value - - def statistics(self) -> SemaphoreStatistics: - if self._internal_semaphore is None: - return SemaphoreStatistics(tasks_waiting=0) - - return self._semaphore.statistics() - - -class CapacityLimiter: - def __new__(cls, total_tokens: float) -> CapacityLimiter: - try: - return get_async_backend().create_capacity_limiter(total_tokens) - except AsyncLibraryNotFoundError: - return CapacityLimiterAdapter(total_tokens) - - async def __aenter__(self) -> None: - raise NotImplementedError - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - raise NotImplementedError - - @property - def total_tokens(self) -> float: - """ - The total number of tokens available for borrowing. - - This is a read-write property. If the total number of tokens is increased, the - proportionate number of tasks waiting on this limiter will be granted their - tokens. - - .. versionchanged:: 3.0 - The property is now writable. - - """ - raise NotImplementedError - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - raise NotImplementedError - - @property - def borrowed_tokens(self) -> int: - """The number of tokens that have currently been borrowed.""" - raise NotImplementedError - - @property - def available_tokens(self) -> float: - """The number of tokens currently available to be borrowed""" - raise NotImplementedError - - def acquire_nowait(self) -> None: - """ - Acquire a token for the current task without waiting for one to become - available. - - :raises ~anyio.WouldBlock: if there are no tokens available for borrowing - - """ - raise NotImplementedError - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - """ - Acquire a token without waiting for one to become available. - - :param borrower: the entity borrowing a token - :raises ~anyio.WouldBlock: if there are no tokens available for borrowing - - """ - raise NotImplementedError - - async def acquire(self) -> None: - """ - Acquire a token for the current task, waiting if necessary for one to become - available. - - """ - raise NotImplementedError - - async def acquire_on_behalf_of(self, borrower: object) -> None: - """ - Acquire a token, waiting if necessary for one to become available. 
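The Semaphore class shown above dispatches to the active backend via __new__. A small sketch of the usual pattern, capping concurrency at two tasks (the worker count and sleep stand in for real work):

import anyio

async def worker(semaphore: anyio.Semaphore, n: int) -> None:
    async with semaphore:  # at most two workers hold the semaphore at once
        print(f"worker {n} running")
        await anyio.sleep(1)

async def main() -> None:
    semaphore = anyio.Semaphore(2)
    async with anyio.create_task_group() as tg:
        for n in range(6):
            tg.start_soon(worker, semaphore, n)

anyio.run(main)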
- - :param borrower: the entity borrowing a token - - """ - raise NotImplementedError - - def release(self) -> None: - """ - Release the token held by the current task. - - :raises RuntimeError: if the current task has not borrowed a token from this - limiter. - - """ - raise NotImplementedError - - def release_on_behalf_of(self, borrower: object) -> None: - """ - Release the token held by the given borrower. - - :raises RuntimeError: if the borrower has not borrowed a token from this - limiter. - - """ - raise NotImplementedError - - def statistics(self) -> CapacityLimiterStatistics: - """ - Return statistics about the current state of this limiter. - - .. versionadded:: 3.0 - - """ - raise NotImplementedError - - -class CapacityLimiterAdapter(CapacityLimiter): - _internal_limiter: CapacityLimiter | None = None - - def __new__(cls, total_tokens: float) -> CapacityLimiterAdapter: - return object.__new__(cls) - - def __init__(self, total_tokens: float) -> None: - self.total_tokens = total_tokens - - @property - def _limiter(self) -> CapacityLimiter: - if self._internal_limiter is None: - self._internal_limiter = get_async_backend().create_capacity_limiter( - self._total_tokens - ) - - return self._internal_limiter - - async def __aenter__(self) -> None: - await self._limiter.__aenter__() - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return await self._limiter.__aexit__(exc_type, exc_val, exc_tb) - - @property - def total_tokens(self) -> float: - if self._internal_limiter is None: - return self._total_tokens - - return self._internal_limiter.total_tokens - - @total_tokens.setter - def total_tokens(self, value: float) -> None: - if not isinstance(value, int) and value is not math.inf: - raise TypeError("total_tokens must be an int or math.inf") - elif value < 1: - raise ValueError("total_tokens must be >= 1") - - if self._internal_limiter is None: - self._total_tokens = value - return - - self._limiter.total_tokens = value - - @property - def borrowed_tokens(self) -> int: - if self._internal_limiter is None: - return 0 - - return self._internal_limiter.borrowed_tokens - - @property - def available_tokens(self) -> float: - if self._internal_limiter is None: - return self._total_tokens - - return self._internal_limiter.available_tokens - - def acquire_nowait(self) -> None: - self._limiter.acquire_nowait() - - def acquire_on_behalf_of_nowait(self, borrower: object) -> None: - self._limiter.acquire_on_behalf_of_nowait(borrower) - - async def acquire(self) -> None: - await self._limiter.acquire() - - async def acquire_on_behalf_of(self, borrower: object) -> None: - await self._limiter.acquire_on_behalf_of(borrower) - - def release(self) -> None: - self._limiter.release() - - def release_on_behalf_of(self, borrower: object) -> None: - self._limiter.release_on_behalf_of(borrower) - - def statistics(self) -> CapacityLimiterStatistics: - if self._internal_limiter is None: - return CapacityLimiterStatistics( - borrowed_tokens=0, - total_tokens=self.total_tokens, - borrowers=(), - tasks_waiting=0, - ) - - return self._internal_limiter.statistics() - - -class ResourceGuard: - """ - A context manager for ensuring that a resource is only used by a single task at a - time. - - Entering this context manager while the previous has not exited it yet will trigger - :exc:`BusyResourceError`. - - :param action: the action to guard against (visible in the :exc:`BusyResourceError` - when triggered, e.g. 
"Another task is already {action} this resource") - - .. versionadded:: 4.1 - """ - - __slots__ = "action", "_guarded" - - def __init__(self, action: str = "using"): - self.action: str = action - self._guarded = False - - def __enter__(self) -> None: - if self._guarded: - raise BusyResourceError(self.action) - - self._guarded = True - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - self._guarded = False - return None diff --git a/myenv/Lib/site-packages/anyio/_core/_tasks.py b/myenv/Lib/site-packages/anyio/_core/_tasks.py deleted file mode 100644 index 2f21ea2..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_tasks.py +++ /dev/null @@ -1,158 +0,0 @@ -from __future__ import annotations - -import math -from collections.abc import Generator -from contextlib import contextmanager -from types import TracebackType - -from ..abc._tasks import TaskGroup, TaskStatus -from ._eventloop import get_async_backend - - -class _IgnoredTaskStatus(TaskStatus[object]): - def started(self, value: object = None) -> None: - pass - - -TASK_STATUS_IGNORED = _IgnoredTaskStatus() - - -class CancelScope: - """ - Wraps a unit of work that can be made separately cancellable. - - :param deadline: The time (clock value) when this scope is cancelled automatically - :param shield: ``True`` to shield the cancel scope from external cancellation - """ - - def __new__( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - return get_async_backend().create_cancel_scope(shield=shield, deadline=deadline) - - def cancel(self) -> None: - """Cancel this scope immediately.""" - raise NotImplementedError - - @property - def deadline(self) -> float: - """ - The time (clock value) when this scope is cancelled automatically. - - Will be ``float('inf')`` if no timeout has been set. - - """ - raise NotImplementedError - - @deadline.setter - def deadline(self, value: float) -> None: - raise NotImplementedError - - @property - def cancel_called(self) -> bool: - """``True`` if :meth:`cancel` has been called.""" - raise NotImplementedError - - @property - def cancelled_caught(self) -> bool: - """ - ``True`` if this scope suppressed a cancellation exception it itself raised. - - This is typically used to check if any work was interrupted, or to see if the - scope was cancelled due to its deadline being reached. The value will, however, - only be ``True`` if the cancellation was triggered by the scope itself (and not - an outer scope). - - """ - raise NotImplementedError - - @property - def shield(self) -> bool: - """ - ``True`` if this scope is shielded from external cancellation. - - While a scope is shielded, it will not receive cancellations from outside. - - """ - raise NotImplementedError - - @shield.setter - def shield(self, value: bool) -> None: - raise NotImplementedError - - def __enter__(self) -> CancelScope: - raise NotImplementedError - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - raise NotImplementedError - - -@contextmanager -def fail_after( - delay: float | None, shield: bool = False -) -> Generator[CancelScope, None, None]: - """ - Create a context manager which raises a :class:`TimeoutError` if does not finish in - time. 
- - :param delay: maximum allowed time (in seconds) before raising the exception, or - ``None`` to disable the timeout - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a context manager that yields a cancel scope - :rtype: :class:`~typing.ContextManager`\\[:class:`~anyio.CancelScope`\\] - - """ - current_time = get_async_backend().current_time - deadline = (current_time() + delay) if delay is not None else math.inf - with get_async_backend().create_cancel_scope( - deadline=deadline, shield=shield - ) as cancel_scope: - yield cancel_scope - - if cancel_scope.cancelled_caught and current_time() >= cancel_scope.deadline: - raise TimeoutError - - -def move_on_after(delay: float | None, shield: bool = False) -> CancelScope: - """ - Create a cancel scope with a deadline that expires after the given delay. - - :param delay: maximum allowed time (in seconds) before exiting the context block, or - ``None`` to disable the timeout - :param shield: ``True`` to shield the cancel scope from external cancellation - :return: a cancel scope - - """ - deadline = ( - (get_async_backend().current_time() + delay) if delay is not None else math.inf - ) - return get_async_backend().create_cancel_scope(deadline=deadline, shield=shield) - - -def current_effective_deadline() -> float: - """ - Return the nearest deadline among all the cancel scopes effective for the current - task. - - :return: a clock value from the event loop's internal clock (or ``float('inf')`` if - there is no deadline in effect, or ``float('-inf')`` if the current scope has - been cancelled) - :rtype: float - - """ - return get_async_backend().current_effective_deadline() - - -def create_task_group() -> TaskGroup: - """ - Create a task group. - - :return: a task group - - """ - return get_async_backend().create_task_group() diff --git a/myenv/Lib/site-packages/anyio/_core/_testing.py b/myenv/Lib/site-packages/anyio/_core/_testing.py deleted file mode 100644 index 9e28b22..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_testing.py +++ /dev/null @@ -1,78 +0,0 @@ -from __future__ import annotations - -from collections.abc import Awaitable, Generator -from typing import Any, cast - -from ._eventloop import get_async_backend - - -class TaskInfo: - """ - Represents an asynchronous task. - - :ivar int id: the unique identifier of the task - :ivar parent_id: the identifier of the parent task, if any - :vartype parent_id: Optional[int] - :ivar str name: the description of the task (if any) - :ivar ~collections.abc.Coroutine coro: the coroutine object of the task - """ - - __slots__ = "_name", "id", "parent_id", "name", "coro" - - def __init__( - self, - id: int, - parent_id: int | None, - name: str | None, - coro: Generator[Any, Any, Any] | Awaitable[Any], - ): - func = get_current_task - self._name = f"{func.__module__}.{func.__qualname__}" - self.id: int = id - self.parent_id: int | None = parent_id - self.name: str | None = name - self.coro: Generator[Any, Any, Any] | Awaitable[Any] = coro - - def __eq__(self, other: object) -> bool: - if isinstance(other, TaskInfo): - return self.id == other.id - - return NotImplemented - - def __hash__(self) -> int: - return hash(self.id) - - def __repr__(self) -> str: - return f"{self.__class__.__name__}(id={self.id!r}, name={self.name!r})" - - def has_pending_cancellation(self) -> bool: - """ - Return ``True`` if the task has a cancellation pending, ``False`` otherwise. - - """ - return False - - -def get_current_task() -> TaskInfo: - """ - Return the current task. 
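fail_after() and move_on_after(), deleted just above, are the two timeout idioms built on CancelScope: the first raises TimeoutError, the second silently cancels the block. A sketch of both:

import anyio

async def main() -> None:
    # Cancel the block and move on after one second, without raising
    with anyio.move_on_after(1) as scope:
        await anyio.sleep(10)
    print("timed out:", scope.cancelled_caught)

    # Same deadline mechanism, but escalate to TimeoutError instead
    try:
        with anyio.fail_after(1):
            await anyio.sleep(10)
    except TimeoutError:
        print("fail_after raised")

anyio.run(main)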
- - :return: a representation of the current task - - """ - return get_async_backend().get_current_task() - - -def get_running_tasks() -> list[TaskInfo]: - """ - Return a list of running tasks in the current event loop. - - :return: a list of task info objects - - """ - return cast("list[TaskInfo]", get_async_backend().get_running_tasks()) - - -async def wait_all_tasks_blocked() -> None: - """Wait until all other tasks are waiting for something.""" - await get_async_backend().wait_all_tasks_blocked() diff --git a/myenv/Lib/site-packages/anyio/_core/_typedattr.py b/myenv/Lib/site-packages/anyio/_core/_typedattr.py deleted file mode 100644 index f358a44..0000000 --- a/myenv/Lib/site-packages/anyio/_core/_typedattr.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping -from typing import Any, TypeVar, final, overload - -from ._exceptions import TypedAttributeLookupError - -T_Attr = TypeVar("T_Attr") -T_Default = TypeVar("T_Default") -undefined = object() - - -def typed_attribute() -> Any: - """Return a unique object, used to mark typed attributes.""" - return object() - - -class TypedAttributeSet: - """ - Superclass for typed attribute collections. - - Checks that every public attribute of every subclass has a type annotation. - """ - - def __init_subclass__(cls) -> None: - annotations: dict[str, Any] = getattr(cls, "__annotations__", {}) - for attrname in dir(cls): - if not attrname.startswith("_") and attrname not in annotations: - raise TypeError( - f"Attribute {attrname!r} is missing its type annotation" - ) - - super().__init_subclass__() - - -class TypedAttributeProvider: - """Base class for classes that wish to provide typed extra attributes.""" - - @property - def extra_attributes(self) -> Mapping[T_Attr, Callable[[], T_Attr]]: - """ - A mapping of the extra attributes to callables that return the corresponding - values. - - If the provider wraps another provider, the attributes from that wrapper should - also be included in the returned mapping (but the wrapper may override the - callables from the wrapped instance). - - """ - return {} - - @overload - def extra(self, attribute: T_Attr) -> T_Attr: ... - - @overload - def extra(self, attribute: T_Attr, default: T_Default) -> T_Attr | T_Default: ... - - @final - def extra(self, attribute: Any, default: object = undefined) -> object: - """ - extra(attribute, default=undefined) - - Return the value of the given typed extra attribute. 
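The introspection helpers above (get_current_task(), get_running_tasks(), wait_all_tasks_blocked()) are mostly used by test tooling; a quick sketch of what they report:

import anyio

async def idle() -> None:
    await anyio.sleep_forever()

async def main() -> None:
    async with anyio.create_task_group() as tg:
        tg.start_soon(idle)
        await anyio.wait_all_tasks_blocked()  # let the child reach its sleep
        print("current:", anyio.get_current_task().name)
        for task in anyio.get_running_tasks():
            print(task.id, task.name)
        tg.cancel_scope.cancel()

anyio.run(main)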
- - :param attribute: the attribute (member of a :class:`~TypedAttributeSet`) to - look for - :param default: the value that should be returned if no value is found for the - attribute - :raises ~anyio.TypedAttributeLookupError: if the search failed and no default - value was given - - """ - try: - getter = self.extra_attributes[attribute] - except KeyError: - if default is undefined: - raise TypedAttributeLookupError("Attribute not found") from None - else: - return default - - return getter() diff --git a/myenv/Lib/site-packages/anyio/abc/__init__.py b/myenv/Lib/site-packages/anyio/abc/__init__.py deleted file mode 100644 index 1ca0fcf..0000000 --- a/myenv/Lib/site-packages/anyio/abc/__init__.py +++ /dev/null @@ -1,57 +0,0 @@ -from __future__ import annotations - -from typing import Any - -from ._eventloop import AsyncBackend as AsyncBackend -from ._resources import AsyncResource as AsyncResource -from ._sockets import ConnectedUDPSocket as ConnectedUDPSocket -from ._sockets import ConnectedUNIXDatagramSocket as ConnectedUNIXDatagramSocket -from ._sockets import IPAddressType as IPAddressType -from ._sockets import IPSockAddrType as IPSockAddrType -from ._sockets import SocketAttribute as SocketAttribute -from ._sockets import SocketListener as SocketListener -from ._sockets import SocketStream as SocketStream -from ._sockets import UDPPacketType as UDPPacketType -from ._sockets import UDPSocket as UDPSocket -from ._sockets import UNIXDatagramPacketType as UNIXDatagramPacketType -from ._sockets import UNIXDatagramSocket as UNIXDatagramSocket -from ._sockets import UNIXSocketStream as UNIXSocketStream -from ._streams import AnyByteReceiveStream as AnyByteReceiveStream -from ._streams import AnyByteSendStream as AnyByteSendStream -from ._streams import AnyByteStream as AnyByteStream -from ._streams import AnyUnreliableByteReceiveStream as AnyUnreliableByteReceiveStream -from ._streams import AnyUnreliableByteSendStream as AnyUnreliableByteSendStream -from ._streams import AnyUnreliableByteStream as AnyUnreliableByteStream -from ._streams import ByteReceiveStream as ByteReceiveStream -from ._streams import ByteSendStream as ByteSendStream -from ._streams import ByteStream as ByteStream -from ._streams import Listener as Listener -from ._streams import ObjectReceiveStream as ObjectReceiveStream -from ._streams import ObjectSendStream as ObjectSendStream -from ._streams import ObjectStream as ObjectStream -from ._streams import UnreliableObjectReceiveStream as UnreliableObjectReceiveStream -from ._streams import UnreliableObjectSendStream as UnreliableObjectSendStream -from ._streams import UnreliableObjectStream as UnreliableObjectStream -from ._subprocesses import Process as Process -from ._tasks import TaskGroup as TaskGroup -from ._tasks import TaskStatus as TaskStatus -from ._testing import TestRunner as TestRunner - -# Re-exported here, for backwards compatibility -# isort: off -from .._core._synchronization import ( - CapacityLimiter as CapacityLimiter, - Condition as Condition, - Event as Event, - Lock as Lock, - Semaphore as Semaphore, -) -from .._core._tasks import CancelScope as CancelScope -from ..from_thread import BlockingPortal as BlockingPortal - -# Re-export imports so they look like they live directly in this package -key: str -value: Any -for key, value in list(locals().items()): - if getattr(value, "__module__", "").startswith("anyio.abc."): - value.__module__ = __name__ diff --git a/myenv/Lib/site-packages/anyio/abc/_eventloop.py 
b/myenv/Lib/site-packages/anyio/abc/_eventloop.py deleted file mode 100644 index 93d0e9d..0000000 --- a/myenv/Lib/site-packages/anyio/abc/_eventloop.py +++ /dev/null @@ -1,374 +0,0 @@ -from __future__ import annotations - -import math -import sys -from abc import ABCMeta, abstractmethod -from collections.abc import AsyncIterator, Awaitable, Callable, Sequence -from contextlib import AbstractContextManager -from os import PathLike -from signal import Signals -from socket import AddressFamily, SocketKind, socket -from typing import ( - IO, - TYPE_CHECKING, - Any, - TypeVar, - Union, - overload, -) - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -if sys.version_info >= (3, 10): - from typing import TypeAlias -else: - from typing_extensions import TypeAlias - -if TYPE_CHECKING: - from .._core._synchronization import CapacityLimiter, Event, Lock, Semaphore - from .._core._tasks import CancelScope - from .._core._testing import TaskInfo - from ..from_thread import BlockingPortal - from ._sockets import ( - ConnectedUDPSocket, - ConnectedUNIXDatagramSocket, - IPSockAddrType, - SocketListener, - SocketStream, - UDPSocket, - UNIXDatagramSocket, - UNIXSocketStream, - ) - from ._subprocesses import Process - from ._tasks import TaskGroup - from ._testing import TestRunner - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") -StrOrBytesPath: TypeAlias = Union[str, bytes, "PathLike[str]", "PathLike[bytes]"] - - -class AsyncBackend(metaclass=ABCMeta): - @classmethod - @abstractmethod - def run( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - options: dict[str, Any], - ) -> T_Retval: - """ - Run the given coroutine function in an asynchronous event loop. - - The current thread must not be already running an event loop. - - :param func: a coroutine function - :param args: positional arguments to ``func`` - :param kwargs: positional arguments to ``func`` - :param options: keyword arguments to call the backend ``run()`` implementation - with - :return: the return value of the coroutine function - """ - - @classmethod - @abstractmethod - def current_token(cls) -> object: - """ - - :return: - """ - - @classmethod - @abstractmethod - def current_time(cls) -> float: - """ - Return the current value of the event loop's internal clock. - - :return: the clock value (seconds) - """ - - @classmethod - @abstractmethod - def cancelled_exception_class(cls) -> type[BaseException]: - """Return the exception class that is raised in a task if it's cancelled.""" - - @classmethod - @abstractmethod - async def checkpoint(cls) -> None: - """ - Check if the task has been cancelled, and allow rescheduling of other tasks. - - This is effectively the same as running :meth:`checkpoint_if_cancelled` and then - :meth:`cancel_shielded_checkpoint`. - """ - - @classmethod - async def checkpoint_if_cancelled(cls) -> None: - """ - Check if the current task group has been cancelled. - - This will check if the task has been cancelled, but will not allow other tasks - to be scheduled if not. - - """ - if cls.current_effective_deadline() == -math.inf: - await cls.checkpoint() - - @classmethod - async def cancel_shielded_checkpoint(cls) -> None: - """ - Allow the rescheduling of other tasks. - - This will give other tasks the opportunity to run, but without checking if the - current task group has been cancelled, unlike with :meth:`checkpoint`. 
- - """ - with cls.create_cancel_scope(shield=True): - await cls.sleep(0) - - @classmethod - @abstractmethod - async def sleep(cls, delay: float) -> None: - """ - Pause the current task for the specified duration. - - :param delay: the duration, in seconds - """ - - @classmethod - @abstractmethod - def create_cancel_scope( - cls, *, deadline: float = math.inf, shield: bool = False - ) -> CancelScope: - pass - - @classmethod - @abstractmethod - def current_effective_deadline(cls) -> float: - """ - Return the nearest deadline among all the cancel scopes effective for the - current task. - - :return: - - a clock value from the event loop's internal clock - - ``inf`` if there is no deadline in effect - - ``-inf`` if the current scope has been cancelled - :rtype: float - """ - - @classmethod - @abstractmethod - def create_task_group(cls) -> TaskGroup: - pass - - @classmethod - @abstractmethod - def create_event(cls) -> Event: - pass - - @classmethod - @abstractmethod - def create_lock(cls, *, fast_acquire: bool) -> Lock: - pass - - @classmethod - @abstractmethod - def create_semaphore( - cls, - initial_value: int, - *, - max_value: int | None = None, - fast_acquire: bool = False, - ) -> Semaphore: - pass - - @classmethod - @abstractmethod - def create_capacity_limiter(cls, total_tokens: float) -> CapacityLimiter: - pass - - @classmethod - @abstractmethod - async def run_sync_in_worker_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - abandon_on_cancel: bool = False, - limiter: CapacityLimiter | None = None, - ) -> T_Retval: - pass - - @classmethod - @abstractmethod - def check_cancelled(cls) -> None: - pass - - @classmethod - @abstractmethod - def run_async_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - pass - - @classmethod - @abstractmethod - def run_sync_from_thread( - cls, - func: Callable[[Unpack[PosArgsT]], T_Retval], - args: tuple[Unpack[PosArgsT]], - token: object, - ) -> T_Retval: - pass - - @classmethod - @abstractmethod - def create_blocking_portal(cls) -> BlockingPortal: - pass - - @classmethod - @abstractmethod - async def open_process( - cls, - command: StrOrBytesPath | Sequence[StrOrBytesPath], - *, - stdin: int | IO[Any] | None, - stdout: int | IO[Any] | None, - stderr: int | IO[Any] | None, - **kwargs: Any, - ) -> Process: - pass - - @classmethod - @abstractmethod - def setup_process_pool_exit_at_shutdown(cls, workers: set[Process]) -> None: - pass - - @classmethod - @abstractmethod - async def connect_tcp( - cls, host: str, port: int, local_address: IPSockAddrType | None = None - ) -> SocketStream: - pass - - @classmethod - @abstractmethod - async def connect_unix(cls, path: str | bytes) -> UNIXSocketStream: - pass - - @classmethod - @abstractmethod - def create_tcp_listener(cls, sock: socket) -> SocketListener: - pass - - @classmethod - @abstractmethod - def create_unix_listener(cls, sock: socket) -> SocketListener: - pass - - @classmethod - @abstractmethod - async def create_udp_socket( - cls, - family: AddressFamily, - local_address: IPSockAddrType | None, - remote_address: IPSockAddrType | None, - reuse_port: bool, - ) -> UDPSocket | ConnectedUDPSocket: - pass - - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket, remote_path: None - ) -> UNIXDatagramSocket: ... 
- - @classmethod - @overload - async def create_unix_datagram_socket( - cls, raw_socket: socket, remote_path: str | bytes - ) -> ConnectedUNIXDatagramSocket: ... - - @classmethod - @abstractmethod - async def create_unix_datagram_socket( - cls, raw_socket: socket, remote_path: str | bytes | None - ) -> UNIXDatagramSocket | ConnectedUNIXDatagramSocket: - pass - - @classmethod - @abstractmethod - async def getaddrinfo( - cls, - host: bytes | str | None, - port: str | int | None, - *, - family: int | AddressFamily = 0, - type: int | SocketKind = 0, - proto: int = 0, - flags: int = 0, - ) -> list[ - tuple[ - AddressFamily, - SocketKind, - int, - str, - tuple[str, int] | tuple[str, int, int, int], - ] - ]: - pass - - @classmethod - @abstractmethod - async def getnameinfo( - cls, sockaddr: IPSockAddrType, flags: int = 0 - ) -> tuple[str, str]: - pass - - @classmethod - @abstractmethod - async def wait_socket_readable(cls, sock: socket) -> None: - pass - - @classmethod - @abstractmethod - async def wait_socket_writable(cls, sock: socket) -> None: - pass - - @classmethod - @abstractmethod - def current_default_thread_limiter(cls) -> CapacityLimiter: - pass - - @classmethod - @abstractmethod - def open_signal_receiver( - cls, *signals: Signals - ) -> AbstractContextManager[AsyncIterator[Signals]]: - pass - - @classmethod - @abstractmethod - def get_current_task(cls) -> TaskInfo: - pass - - @classmethod - @abstractmethod - def get_running_tasks(cls) -> Sequence[TaskInfo]: - pass - - @classmethod - @abstractmethod - async def wait_all_tasks_blocked(cls) -> None: - pass - - @classmethod - @abstractmethod - def create_test_runner(cls, options: dict[str, Any]) -> TestRunner: - pass diff --git a/myenv/Lib/site-packages/anyio/abc/_resources.py b/myenv/Lib/site-packages/anyio/abc/_resources.py deleted file mode 100644 index 10df115..0000000 --- a/myenv/Lib/site-packages/anyio/abc/_resources.py +++ /dev/null @@ -1,33 +0,0 @@ -from __future__ import annotations - -from abc import ABCMeta, abstractmethod -from types import TracebackType -from typing import TypeVar - -T = TypeVar("T") - - -class AsyncResource(metaclass=ABCMeta): - """ - Abstract base class for all closeable asynchronous resources. - - Works as an asynchronous context manager which returns the instance itself on enter, - and calls :meth:`aclose` on exit. 
- """ - - __slots__ = () - - async def __aenter__(self: T) -> T: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.aclose() - - @abstractmethod - async def aclose(self) -> None: - """Close the resource.""" diff --git a/myenv/Lib/site-packages/anyio/abc/_sockets.py b/myenv/Lib/site-packages/anyio/abc/_sockets.py deleted file mode 100644 index 1c6a450..0000000 --- a/myenv/Lib/site-packages/anyio/abc/_sockets.py +++ /dev/null @@ -1,194 +0,0 @@ -from __future__ import annotations - -import socket -from abc import abstractmethod -from collections.abc import Callable, Collection, Mapping -from contextlib import AsyncExitStack -from io import IOBase -from ipaddress import IPv4Address, IPv6Address -from socket import AddressFamily -from types import TracebackType -from typing import Any, TypeVar, Union - -from .._core._typedattr import ( - TypedAttributeProvider, - TypedAttributeSet, - typed_attribute, -) -from ._streams import ByteStream, Listener, UnreliableObjectStream -from ._tasks import TaskGroup - -IPAddressType = Union[str, IPv4Address, IPv6Address] -IPSockAddrType = tuple[str, int] -SockAddrType = Union[IPSockAddrType, str] -UDPPacketType = tuple[bytes, IPSockAddrType] -UNIXDatagramPacketType = tuple[bytes, str] -T_Retval = TypeVar("T_Retval") - - -class _NullAsyncContextManager: - async def __aenter__(self) -> None: - pass - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - return None - - -class SocketAttribute(TypedAttributeSet): - #: the address family of the underlying socket - family: AddressFamily = typed_attribute() - #: the local socket address of the underlying socket - local_address: SockAddrType = typed_attribute() - #: for IP addresses, the local port the underlying socket is bound to - local_port: int = typed_attribute() - #: the underlying stdlib socket object - raw_socket: socket.socket = typed_attribute() - #: the remote address the underlying socket is connected to - remote_address: SockAddrType = typed_attribute() - #: for IP addresses, the remote port the underlying socket is connected to - remote_port: int = typed_attribute() - - -class _SocketProvider(TypedAttributeProvider): - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - from .._core._sockets import convert_ipv6_sockaddr as convert - - attributes: dict[Any, Callable[[], Any]] = { - SocketAttribute.family: lambda: self._raw_socket.family, - SocketAttribute.local_address: lambda: convert( - self._raw_socket.getsockname() - ), - SocketAttribute.raw_socket: lambda: self._raw_socket, - } - try: - peername: tuple[str, int] | None = convert(self._raw_socket.getpeername()) - except OSError: - peername = None - - # Provide the remote address for connected sockets - if peername is not None: - attributes[SocketAttribute.remote_address] = lambda: peername - - # Provide local and remote ports for IP based sockets - if self._raw_socket.family in (AddressFamily.AF_INET, AddressFamily.AF_INET6): - attributes[SocketAttribute.local_port] = ( - lambda: self._raw_socket.getsockname()[1] - ) - if peername is not None: - remote_port = peername[1] - attributes[SocketAttribute.remote_port] = lambda: remote_port - - return attributes - - @property - @abstractmethod - def _raw_socket(self) -> socket.socket: - pass - - -class SocketStream(ByteStream, _SocketProvider): - """ - 
Transports bytes over a socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - -class UNIXSocketStream(SocketStream): - @abstractmethod - async def send_fds(self, message: bytes, fds: Collection[int | IOBase]) -> None: - """ - Send file descriptors along with a message to the peer. - - :param message: a non-empty bytestring - :param fds: a collection of files (either numeric file descriptors or open file - or socket objects) - """ - - @abstractmethod - async def receive_fds(self, msglen: int, maxfds: int) -> tuple[bytes, list[int]]: - """ - Receive file descriptors along with a message from the peer. - - :param msglen: length of the message to expect from the peer - :param maxfds: maximum number of file descriptors to expect from the peer - :return: a tuple of (message, file descriptors) - """ - - -class SocketListener(Listener[SocketStream], _SocketProvider): - """ - Listens to incoming socket connections. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - @abstractmethod - async def accept(self) -> SocketStream: - """Accept an incoming connection.""" - - async def serve( - self, - handler: Callable[[SocketStream], Any], - task_group: TaskGroup | None = None, - ) -> None: - from .. import create_task_group - - async with AsyncExitStack() as stack: - if task_group is None: - task_group = await stack.enter_async_context(create_task_group()) - - while True: - stream = await self.accept() - task_group.start_soon(handler, stream) - - -class UDPSocket(UnreliableObjectStream[UDPPacketType], _SocketProvider): - """ - Represents an unconnected UDP socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - async def sendto(self, data: bytes, host: str, port: int) -> None: - """ - Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, (host, port))). - - """ - return await self.send((data, (host, port))) - - -class ConnectedUDPSocket(UnreliableObjectStream[bytes], _SocketProvider): - """ - Represents a connected UDP socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - -class UNIXDatagramSocket( - UnreliableObjectStream[UNIXDatagramPacketType], _SocketProvider -): - """ - Represents an unconnected Unix datagram socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`. - """ - - async def sendto(self, data: bytes, path: str) -> None: - """Alias for :meth:`~.UnreliableObjectSendStream.send` ((data, path)).""" - return await self.send((data, path)) - - -class ConnectedUNIXDatagramSocket(UnreliableObjectStream[bytes], _SocketProvider): - """ - Represents a connected Unix datagram socket. - - Supports all relevant extra attributes from :class:`~SocketAttribute`.
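SocketListener.serve(), shown above, spawns one task per accepted connection. The usual entry point is the create_tcp_listener() helper; a minimal echo-server sketch (the port is illustrative):

import anyio
from anyio.abc import SocketStream

async def echo(stream: SocketStream) -> None:
    async with stream:
        async for chunk in stream:  # iteration ends when the peer disconnects
            await stream.send(chunk)

async def main() -> None:
    listener = await anyio.create_tcp_listener(local_port=8000)
    await listener.serve(echo)

anyio.run(main)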
- """ diff --git a/myenv/Lib/site-packages/anyio/abc/_streams.py b/myenv/Lib/site-packages/anyio/abc/_streams.py deleted file mode 100644 index 8c63868..0000000 --- a/myenv/Lib/site-packages/anyio/abc/_streams.py +++ /dev/null @@ -1,203 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from collections.abc import Callable -from typing import Any, Generic, TypeVar, Union - -from .._core._exceptions import EndOfStream -from .._core._typedattr import TypedAttributeProvider -from ._resources import AsyncResource -from ._tasks import TaskGroup - -T_Item = TypeVar("T_Item") -T_co = TypeVar("T_co", covariant=True) -T_contra = TypeVar("T_contra", contravariant=True) - - -class UnreliableObjectReceiveStream( - Generic[T_co], AsyncResource, TypedAttributeProvider -): - """ - An interface for receiving objects. - - This interface makes no guarantees that the received messages arrive in the order in - which they were sent, or that no messages are missed. - - Asynchronously iterating over objects of this type will yield objects matching the - given type parameter. - """ - - def __aiter__(self) -> UnreliableObjectReceiveStream[T_co]: - return self - - async def __anext__(self) -> T_co: - try: - return await self.receive() - except EndOfStream: - raise StopAsyncIteration - - @abstractmethod - async def receive(self) -> T_co: - """ - Receive the next item. - - :raises ~anyio.ClosedResourceError: if the receive stream has been explicitly - closed - :raises ~anyio.EndOfStream: if this stream has been closed from the other end - :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable - due to external causes - """ - - -class UnreliableObjectSendStream( - Generic[T_contra], AsyncResource, TypedAttributeProvider -): - """ - An interface for sending objects. - - This interface makes no guarantees that the messages sent will reach the - recipient(s) in the same order in which they were sent, or at all. - """ - - @abstractmethod - async def send(self, item: T_contra) -> None: - """ - Send an item to the peer(s). - - :param item: the item to send - :raises ~anyio.ClosedResourceError: if the send stream has been explicitly - closed - :raises ~anyio.BrokenResourceError: if this stream has been rendered unusable - due to external causes - """ - - -class UnreliableObjectStream( - UnreliableObjectReceiveStream[T_Item], UnreliableObjectSendStream[T_Item] -): - """ - A bidirectional message stream which does not guarantee the order or reliability of - message delivery. - """ - - -class ObjectReceiveStream(UnreliableObjectReceiveStream[T_co]): - """ - A receive message stream which guarantees that messages are received in the same - order in which they were sent, and that no messages are missed. - """ - - -class ObjectSendStream(UnreliableObjectSendStream[T_contra]): - """ - A send message stream which guarantees that messages are delivered in the same order - in which they were sent, without missing any messages in the middle. - """ - - -class ObjectStream( - ObjectReceiveStream[T_Item], - ObjectSendStream[T_Item], - UnreliableObjectStream[T_Item], -): - """ - A bidirectional message stream which guarantees the order and reliability of message - delivery. - """ - - @abstractmethod - async def send_eof(self) -> None: - """ - Send an end-of-file indication to the peer. - - You should not try to send any further data to this stream after calling this - method. This method is idempotent (does nothing on successive calls). 
- """ - - -class ByteReceiveStream(AsyncResource, TypedAttributeProvider): - """ - An interface for receiving bytes from a single peer. - - Iterating this byte stream will yield a byte string of arbitrary length, but no more - than 65536 bytes. - """ - - def __aiter__(self) -> ByteReceiveStream: - return self - - async def __anext__(self) -> bytes: - try: - return await self.receive() - except EndOfStream: - raise StopAsyncIteration - - @abstractmethod - async def receive(self, max_bytes: int = 65536) -> bytes: - """ - Receive at most ``max_bytes`` bytes from the peer. - - .. note:: Implementors of this interface should not return an empty - :class:`bytes` object, and users should ignore them. - - :param max_bytes: maximum number of bytes to receive - :return: the received bytes - :raises ~anyio.EndOfStream: if this stream has been closed from the other end - """ - - -class ByteSendStream(AsyncResource, TypedAttributeProvider): - """An interface for sending bytes to a single peer.""" - - @abstractmethod - async def send(self, item: bytes) -> None: - """ - Send the given bytes to the peer. - - :param item: the bytes to send - """ - - -class ByteStream(ByteReceiveStream, ByteSendStream): - """A bidirectional byte stream.""" - - @abstractmethod - async def send_eof(self) -> None: - """ - Send an end-of-file indication to the peer. - - You should not try to send any further data to this stream after calling this - method. This method is idempotent (does nothing on successive calls). - """ - - -#: Type alias for all unreliable bytes-oriented receive streams. -AnyUnreliableByteReceiveStream = Union[ - UnreliableObjectReceiveStream[bytes], ByteReceiveStream -] -#: Type alias for all unreliable bytes-oriented send streams. -AnyUnreliableByteSendStream = Union[UnreliableObjectSendStream[bytes], ByteSendStream] -#: Type alias for all unreliable bytes-oriented streams. -AnyUnreliableByteStream = Union[UnreliableObjectStream[bytes], ByteStream] -#: Type alias for all bytes-oriented receive streams. -AnyByteReceiveStream = Union[ObjectReceiveStream[bytes], ByteReceiveStream] -#: Type alias for all bytes-oriented send streams. -AnyByteSendStream = Union[ObjectSendStream[bytes], ByteSendStream] -#: Type alias for all bytes-oriented streams. -AnyByteStream = Union[ObjectStream[bytes], ByteStream] - - -class Listener(Generic[T_co], AsyncResource, TypedAttributeProvider): - """An interface for objects that let you accept incoming connections.""" - - @abstractmethod - async def serve( - self, handler: Callable[[T_co], Any], task_group: TaskGroup | None = None - ) -> None: - """ - Accept incoming connections as they come in and start tasks to handle them. 
- - :param handler: a callable that will be used to handle each accepted connection - :param task_group: the task group that will be used to start tasks for handling - each accepted connection (if omitted, an ad-hoc task group will be created) - """ diff --git a/myenv/Lib/site-packages/anyio/abc/_subprocesses.py b/myenv/Lib/site-packages/anyio/abc/_subprocesses.py deleted file mode 100644 index ce0564c..0000000 --- a/myenv/Lib/site-packages/anyio/abc/_subprocesses.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -from abc import abstractmethod -from signal import Signals - -from ._resources import AsyncResource -from ._streams import ByteReceiveStream, ByteSendStream - - -class Process(AsyncResource): - """An asynchronous version of :class:`subprocess.Popen`.""" - - @abstractmethod - async def wait(self) -> int: - """ - Wait until the process exits. - - :return: the exit code of the process - """ - - @abstractmethod - def terminate(self) -> None: - """ - Terminates the process, gracefully if possible. - - On Windows, this calls ``TerminateProcess()``. - On POSIX systems, this sends ``SIGTERM`` to the process. - - .. seealso:: :meth:`subprocess.Popen.terminate` - """ - - @abstractmethod - def kill(self) -> None: - """ - Kills the process. - - On Windows, this calls ``TerminateProcess()``. - On POSIX systems, this sends ``SIGKILL`` to the process. - - .. seealso:: :meth:`subprocess.Popen.kill` - """ - - @abstractmethod - def send_signal(self, signal: Signals) -> None: - """ - Send a signal to the subprocess. - - .. seealso:: :meth:`subprocess.Popen.send_signal` - - :param signal: the signal number (e.g. :data:`signal.SIGHUP`) - """ - - @property - @abstractmethod - def pid(self) -> int: - """The process ID of the process.""" - - @property - @abstractmethod - def returncode(self) -> int | None: - """ - The return code of the process. If the process has not yet terminated, this will - be ``None``. - """ - - @property - @abstractmethod - def stdin(self) -> ByteSendStream | None: - """The stream for the standard input of the process.""" - - @property - @abstractmethod - def stdout(self) -> ByteReceiveStream | None: - """The stream for the standard output of the process.""" - - @property - @abstractmethod - def stderr(self) -> ByteReceiveStream | None: - """The stream for the standard error output of the process.""" diff --git a/myenv/Lib/site-packages/anyio/abc/_tasks.py b/myenv/Lib/site-packages/anyio/abc/_tasks.py deleted file mode 100644 index 88aecf3..0000000 --- a/myenv/Lib/site-packages/anyio/abc/_tasks.py +++ /dev/null @@ -1,95 +0,0 @@ -from __future__ import annotations - -import sys -from abc import ABCMeta, abstractmethod -from collections.abc import Awaitable, Callable -from types import TracebackType -from typing import TYPE_CHECKING, Any, Protocol, TypeVar, overload - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -if TYPE_CHECKING: - from .._core._tasks import CancelScope - -T_Retval = TypeVar("T_Retval") -T_contra = TypeVar("T_contra", contravariant=True) -PosArgsT = TypeVarTuple("PosArgsT") - - -class TaskStatus(Protocol[T_contra]): - @overload - def started(self: TaskStatus[None]) -> None: ... - - @overload - def started(self, value: T_contra) -> None: ... - - def started(self, value: T_contra | None = None) -> None: - """ - Signal that the task has started. 
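The Process ABC above exposes stdin/stdout as byte streams rather than file objects. A sketch of driving it through the open_process() helper ('cat' assumes a POSIX system):

import anyio

async def main() -> None:
    async with await anyio.open_process(["cat"]) as process:
        await process.stdin.send(b"hello\n")
        await process.stdin.aclose()  # EOF on stdin lets cat exit
        async for chunk in process.stdout:
            print(chunk)
        await process.wait()

anyio.run(main)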
- - :param value: object passed back to the starter of the task - """ - - -class TaskGroup(metaclass=ABCMeta): - """ - Groups several asynchronous tasks together. - - :ivar cancel_scope: the cancel scope inherited by all child tasks - :vartype cancel_scope: CancelScope - """ - - cancel_scope: CancelScope - - @abstractmethod - def start_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[Any]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> None: - """ - Start a new task in this task group. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - - .. versionadded:: 3.0 - """ - - @abstractmethod - async def start( - self, - func: Callable[..., Awaitable[Any]], - *args: object, - name: object = None, - ) -> Any: - """ - Start a new task and wait until it signals for readiness. - - :param func: a coroutine function - :param args: positional arguments to call the function with - :param name: name of the task, for the purposes of introspection and debugging - :return: the value passed to ``task_status.started()`` - :raises RuntimeError: if the task finishes without calling - ``task_status.started()`` - - .. versionadded:: 3.0 - """ - - @abstractmethod - async def __aenter__(self) -> TaskGroup: - """Enter the task group context and allow starting new tasks.""" - - @abstractmethod - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - """Exit the task group context waiting for all tasks to finish.""" diff --git a/myenv/Lib/site-packages/anyio/abc/_testing.py b/myenv/Lib/site-packages/anyio/abc/_testing.py deleted file mode 100644 index 7c50ed7..0000000 --- a/myenv/Lib/site-packages/anyio/abc/_testing.py +++ /dev/null @@ -1,65 +0,0 @@ -from __future__ import annotations - -import types -from abc import ABCMeta, abstractmethod -from collections.abc import AsyncGenerator, Callable, Coroutine, Iterable -from typing import Any, TypeVar - -_T = TypeVar("_T") - - -class TestRunner(metaclass=ABCMeta): - """ - Encapsulates a running event loop. Every call made through this object will use the - same event loop. - """ - - def __enter__(self) -> TestRunner: - return self - - @abstractmethod - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: types.TracebackType | None, - ) -> bool | None: ... - - @abstractmethod - def run_asyncgen_fixture( - self, - fixture_func: Callable[..., AsyncGenerator[_T, Any]], - kwargs: dict[str, Any], - ) -> Iterable[_T]: - """ - Run an async generator fixture. - - :param fixture_func: the fixture function - :param kwargs: keyword arguments to call the fixture function with - :return: an iterator yielding the value yielded from the async generator - """ - - @abstractmethod - def run_fixture( - self, - fixture_func: Callable[..., Coroutine[Any, Any, _T]], - kwargs: dict[str, Any], - ) -> _T: - """ - Run an async fixture. - - :param fixture_func: the fixture function - :param kwargs: keyword arguments to call the fixture function with - :return: the return value of the fixture function - """ - - @abstractmethod - def run_test( - self, test_func: Callable[..., Coroutine[Any, Any, Any]], kwargs: dict[str, Any] - ) -> None: - """ - Run an async test function. 
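These runner hooks are what anyio's pytest plugin drives; a test that ends up in run_test() is simply a coroutine function carrying the marker (a sketch, assuming the plugin is installed along with anyio):

import pytest
import anyio

@pytest.mark.anyio
async def test_checkpoint():
    # Executed by a TestRunner once per configured backend.
    await anyio.sleep(0)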
- - :param test_func: the test function - :param kwargs: keyword arguments to call the test function with - """ diff --git a/myenv/Lib/site-packages/anyio/from_thread.py b/myenv/Lib/site-packages/anyio/from_thread.py deleted file mode 100644 index 93a4cfe..0000000 --- a/myenv/Lib/site-packages/anyio/from_thread.py +++ /dev/null @@ -1,527 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import Awaitable, Callable, Generator -from concurrent.futures import Future -from contextlib import ( - AbstractAsyncContextManager, - AbstractContextManager, - contextmanager, -) -from dataclasses import dataclass, field -from inspect import isawaitable -from threading import Lock, Thread, get_ident -from types import TracebackType -from typing import ( - Any, - Generic, - TypeVar, - cast, - overload, -) - -from ._core import _eventloop -from ._core._eventloop import get_async_backend, get_cancelled_exc_class, threadlocals -from ._core._synchronization import Event -from ._core._tasks import CancelScope, create_task_group -from .abc import AsyncBackend -from .abc._tasks import TaskStatus - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -T_Retval = TypeVar("T_Retval") -T_co = TypeVar("T_co", covariant=True) -PosArgsT = TypeVarTuple("PosArgsT") - - -def run( - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], *args: Unpack[PosArgsT] -) -> T_Retval: - """ - Call a coroutine function from a worker thread. - - :param func: a coroutine function - :param args: positional arguments for the callable - :return: the return value of the coroutine function - - """ - try: - async_backend = threadlocals.current_async_backend - token = threadlocals.current_token - except AttributeError: - raise RuntimeError( - "This function can only be run from an AnyIO worker thread" - ) from None - - return async_backend.run_async_from_thread(func, args, token=token) - - -def run_sync( - func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] -) -> T_Retval: - """ - Call a function in the event loop thread from a worker thread. - - :param func: a callable - :param args: positional arguments for the callable - :return: the return value of the callable - - """ - try: - async_backend = threadlocals.current_async_backend - token = threadlocals.current_token - except AttributeError: - raise RuntimeError( - "This function can only be run from an AnyIO worker thread" - ) from None - - return async_backend.run_sync_from_thread(func, args, token=token) - - -class _BlockingAsyncContextManager(Generic[T_co], AbstractContextManager): - _enter_future: Future[T_co] - _exit_future: Future[bool | None] - _exit_event: Event - _exit_exc_info: tuple[ - type[BaseException] | None, BaseException | None, TracebackType | None - ] = (None, None, None) - - def __init__( - self, async_cm: AbstractAsyncContextManager[T_co], portal: BlockingPortal - ): - self._async_cm = async_cm - self._portal = portal - - async def run_async_cm(self) -> bool | None: - try: - self._exit_event = Event() - value = await self._async_cm.__aenter__() - except BaseException as exc: - self._enter_future.set_exception(exc) - raise - else: - self._enter_future.set_result(value) - - try: - # Wait for the sync context manager to exit. - # This next statement can raise `get_cancelled_exc_class()` if - # something went wrong in a task group in this async context - # manager. 
- await self._exit_event.wait() - finally: - # In case of cancellation, it could be that we end up here before - # `_BlockingAsyncContextManager.__exit__` is called, and an - # `_exit_exc_info` has been set. - result = await self._async_cm.__aexit__(*self._exit_exc_info) - return result - - def __enter__(self) -> T_co: - self._enter_future = Future() - self._exit_future = self._portal.start_task_soon(self.run_async_cm) - return self._enter_future.result() - - def __exit__( - self, - __exc_type: type[BaseException] | None, - __exc_value: BaseException | None, - __traceback: TracebackType | None, - ) -> bool | None: - self._exit_exc_info = __exc_type, __exc_value, __traceback - self._portal.call(self._exit_event.set) - return self._exit_future.result() - - -class _BlockingPortalTaskStatus(TaskStatus): - def __init__(self, future: Future): - self._future = future - - def started(self, value: object = None) -> None: - self._future.set_result(value) - - -class BlockingPortal: - """An object that lets external threads run code in an asynchronous event loop.""" - - def __new__(cls) -> BlockingPortal: - return get_async_backend().create_blocking_portal() - - def __init__(self) -> None: - self._event_loop_thread_id: int | None = get_ident() - self._stop_event = Event() - self._task_group = create_task_group() - self._cancelled_exc_class = get_cancelled_exc_class() - - async def __aenter__(self) -> BlockingPortal: - await self._task_group.__aenter__() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> bool | None: - await self.stop() - return await self._task_group.__aexit__(exc_type, exc_val, exc_tb) - - def _check_running(self) -> None: - if self._event_loop_thread_id is None: - raise RuntimeError("This portal is not running") - if self._event_loop_thread_id == get_ident(): - raise RuntimeError( - "This method cannot be called from the event loop thread" - ) - - async def sleep_until_stopped(self) -> None: - """Sleep until :meth:`stop` is called.""" - await self._stop_event.wait() - - async def stop(self, cancel_remaining: bool = False) -> None: - """ - Signal the portal to shut down. - - This marks the portal as no longer accepting new calls and exits from - :meth:`sleep_until_stopped`. 
- - :param cancel_remaining: ``True`` to cancel all the remaining tasks, ``False`` - to let them finish before returning - - """ - self._event_loop_thread_id = None - self._stop_event.set() - if cancel_remaining: - self._task_group.cancel_scope.cancel() - - async def _call_func( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - future: Future[T_Retval], - ) -> None: - def callback(f: Future[T_Retval]) -> None: - if f.cancelled() and self._event_loop_thread_id not in ( - None, - get_ident(), - ): - self.call(scope.cancel) - - try: - retval_or_awaitable = func(*args, **kwargs) - if isawaitable(retval_or_awaitable): - with CancelScope() as scope: - if future.cancelled(): - scope.cancel() - else: - future.add_done_callback(callback) - - retval = await retval_or_awaitable - else: - retval = retval_or_awaitable - except self._cancelled_exc_class: - future.cancel() - future.set_running_or_notify_cancel() - except BaseException as exc: - if not future.cancelled(): - future.set_exception(exc) - - # Let base exceptions fall through - if not isinstance(exc, Exception): - raise - else: - if not future.cancelled(): - future.set_result(retval) - finally: - scope = None # type: ignore[assignment] - - def _spawn_task_from_thread( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - args: tuple[Unpack[PosArgsT]], - kwargs: dict[str, Any], - name: object, - future: Future[T_Retval], - ) -> None: - """ - Spawn a new task using the given callable. - - Implementors must ensure that the future is resolved when the task finishes. - - :param func: a callable - :param args: positional arguments to be passed to the callable - :param kwargs: keyword arguments to be passed to the callable - :param name: name of the task (will be coerced to a string if not ``None``) - :param future: a future that will resolve to the return value of the callable, - or the exception raised during its execution - - """ - raise NotImplementedError - - @overload - def call( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - *args: Unpack[PosArgsT], - ) -> T_Retval: ... - - @overload - def call( - self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] - ) -> T_Retval: ... - - def call( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - *args: Unpack[PosArgsT], - ) -> T_Retval: - """ - Call the given function in the event loop thread. - - If the callable returns a coroutine object, it is awaited on. - - :param func: any callable - :raises RuntimeError: if the portal is not running or if this method is called - from within the event loop thread - - """ - return cast(T_Retval, self.start_task_soon(func, *args).result()) - - @overload - def start_task_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval]], - *args: Unpack[PosArgsT], - name: object = None, - ) -> Future[T_Retval]: ... - - @overload - def start_task_soon( - self, - func: Callable[[Unpack[PosArgsT]], T_Retval], - *args: Unpack[PosArgsT], - name: object = None, - ) -> Future[T_Retval]: ... - - def start_task_soon( - self, - func: Callable[[Unpack[PosArgsT]], Awaitable[T_Retval] | T_Retval], - *args: Unpack[PosArgsT], - name: object = None, - ) -> Future[T_Retval]: - """ - Start a task in the portal's task group. - - The task will be run inside a cancel scope which can be cancelled by cancelling - the returned future. 
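A sketch of driving a portal from a plain thread, using only the public from_thread API:

import anyio
from anyio.from_thread import start_blocking_portal

with start_blocking_portal() as portal:
    portal.call(anyio.sleep, 0.5)                    # blocks this thread until done
    future = portal.start_task_soon(anyio.sleep, 10)
    future.cancel()                                  # cancels the task's cancel scope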
- - :param func: the target function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a future that resolves with the return value of the callable if the - task completes successfully, or with the exception raised in the task - :raises RuntimeError: if the portal is not running or if this method is called - from within the event loop thread - :rtype: concurrent.futures.Future[T_Retval] - - .. versionadded:: 3.0 - - """ - self._check_running() - f: Future[T_Retval] = Future() - self._spawn_task_from_thread(func, args, {}, name, f) - return f - - def start_task( - self, - func: Callable[..., Awaitable[T_Retval]], - *args: object, - name: object = None, - ) -> tuple[Future[T_Retval], Any]: - """ - Start a task in the portal's task group and wait until it signals for readiness. - - This method works the same way as :meth:`.abc.TaskGroup.start`. - - :param func: the target function - :param args: positional arguments passed to ``func`` - :param name: name of the task (will be coerced to a string if not ``None``) - :return: a tuple of (future, task_status_value) where the ``task_status_value`` - is the value passed to ``task_status.started()`` from within the target - function - :rtype: tuple[concurrent.futures.Future[T_Retval], Any] - - .. versionadded:: 3.0 - - """ - - def task_done(future: Future[T_Retval]) -> None: - if not task_status_future.done(): - if future.cancelled(): - task_status_future.cancel() - elif future.exception(): - task_status_future.set_exception(future.exception()) - else: - exc = RuntimeError( - "Task exited without calling task_status.started()" - ) - task_status_future.set_exception(exc) - - self._check_running() - task_status_future: Future = Future() - task_status = _BlockingPortalTaskStatus(task_status_future) - f: Future = Future() - f.add_done_callback(task_done) - self._spawn_task_from_thread(func, args, {"task_status": task_status}, name, f) - return f, task_status_future.result() - - def wrap_async_context_manager( - self, cm: AbstractAsyncContextManager[T_co] - ) -> AbstractContextManager[T_co]: - """ - Wrap an async context manager as a synchronous context manager via this portal. - - Spawns a task that will call both ``__aenter__()`` and ``__aexit__()``, stopping - in the middle until the synchronous context manager exits. - - :param cm: an asynchronous context manager - :return: a synchronous context manager - - .. versionadded:: 2.1 - - """ - return _BlockingAsyncContextManager(cm, self) - - -@dataclass -class BlockingPortalProvider: - """ - A manager for a blocking portal. Used as a context manager. The first thread to - enter this context manager causes a blocking portal to be started with the specific - parameters, and the last thread to exit causes the portal to be shut down. Thus, - there will be exactly one blocking portal running in this context as long as at - least one thread has entered this context manager. - - The parameters are the same as for :func:`~anyio.run`. - - :param backend: name of the backend - :param backend_options: backend options - - .. 
versionadded:: 4.4 - """ - - backend: str = "asyncio" - backend_options: dict[str, Any] | None = None - _lock: Lock = field(init=False, default_factory=Lock) - _leases: int = field(init=False, default=0) - _portal: BlockingPortal = field(init=False) - _portal_cm: AbstractContextManager[BlockingPortal] | None = field( - init=False, default=None - ) - - def __enter__(self) -> BlockingPortal: - with self._lock: - if self._portal_cm is None: - self._portal_cm = start_blocking_portal( - self.backend, self.backend_options - ) - self._portal = self._portal_cm.__enter__() - - self._leases += 1 - return self._portal - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - portal_cm: AbstractContextManager[BlockingPortal] | None = None - with self._lock: - assert self._portal_cm - assert self._leases > 0 - self._leases -= 1 - if not self._leases: - portal_cm = self._portal_cm - self._portal_cm = None - del self._portal - - if portal_cm: - portal_cm.__exit__(None, None, None) - - -@contextmanager -def start_blocking_portal( - backend: str = "asyncio", backend_options: dict[str, Any] | None = None -) -> Generator[BlockingPortal, Any, None]: - """ - Start a new event loop in a new thread and run a blocking portal in its main task. - - The parameters are the same as for :func:`~anyio.run`. - - :param backend: name of the backend - :param backend_options: backend options - :return: a context manager that yields a blocking portal - - .. versionchanged:: 3.0 - Usage as a context manager is now required. - - """ - - async def run_portal() -> None: - async with BlockingPortal() as portal_: - future.set_result(portal_) - await portal_.sleep_until_stopped() - - def run_blocking_portal() -> None: - if future.set_running_or_notify_cancel(): - try: - _eventloop.run( - run_portal, backend=backend, backend_options=backend_options - ) - except BaseException as exc: - if not future.done(): - future.set_exception(exc) - - future: Future[BlockingPortal] = Future() - thread = Thread(target=run_blocking_portal, daemon=True) - thread.start() - try: - cancel_remaining_tasks = False - portal = future.result() - try: - yield portal - except BaseException: - cancel_remaining_tasks = True - raise - finally: - try: - portal.call(portal.stop, cancel_remaining_tasks) - except RuntimeError: - pass - finally: - thread.join() - - -def check_cancelled() -> None: - """ - Check if the cancel scope of the host task's running the current worker thread has - been cancelled. - - If the host task's current cancel scope has indeed been cancelled, the - backend-specific cancellation exception will be raised. 
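A sketch of cooperative cancellation in a worker thread: move_on_after() cancels the host task's scope, and the next check_cancelled() call re-raises that cancellation inside the thread.

import anyio
from anyio import from_thread, to_thread

def blocking_work():
    while True:
        from_thread.check_cancelled()   # raises once the host scope is cancelled
        ...                             # one bounded slice of blocking work

async def main():
    with anyio.move_on_after(1):
        await to_thread.run_sync(blocking_work)

anyio.run(main)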
- - :raises RuntimeError: if the current thread was not spawned by - :func:`.to_thread.run_sync` - - """ - try: - async_backend: AsyncBackend = threadlocals.current_async_backend - except AttributeError: - raise RuntimeError( - "This function can only be run from an AnyIO worker thread" - ) from None - - async_backend.check_cancelled() diff --git a/myenv/Lib/site-packages/anyio/lowlevel.py b/myenv/Lib/site-packages/anyio/lowlevel.py deleted file mode 100644 index 14c7668..0000000 --- a/myenv/Lib/site-packages/anyio/lowlevel.py +++ /dev/null @@ -1,161 +0,0 @@ -from __future__ import annotations - -import enum -from dataclasses import dataclass -from typing import Any, Generic, Literal, TypeVar, overload -from weakref import WeakKeyDictionary - -from ._core._eventloop import get_async_backend - -T = TypeVar("T") -D = TypeVar("D") - - -async def checkpoint() -> None: - """ - Check for cancellation and allow the scheduler to switch to another task. - - Equivalent to (but more efficient than):: - - await checkpoint_if_cancelled() - await cancel_shielded_checkpoint() - - - .. versionadded:: 3.0 - - """ - await get_async_backend().checkpoint() - - -async def checkpoint_if_cancelled() -> None: - """ - Enter a checkpoint if the enclosing cancel scope has been cancelled. - - This does not allow the scheduler to switch to a different task. - - .. versionadded:: 3.0 - - """ - await get_async_backend().checkpoint_if_cancelled() - - -async def cancel_shielded_checkpoint() -> None: - """ - Allow the scheduler to switch to another task but without checking for cancellation. - - Equivalent to (but potentially more efficient than):: - - with CancelScope(shield=True): - await checkpoint() - - - .. versionadded:: 3.0 - - """ - await get_async_backend().cancel_shielded_checkpoint() - - -def current_token() -> object: - """ - Return a backend specific token object that can be used to get back to the event - loop. - - """ - return get_async_backend().current_token() - - -_run_vars: WeakKeyDictionary[Any, dict[str, Any]] = WeakKeyDictionary() -_token_wrappers: dict[Any, _TokenWrapper] = {} - - -@dataclass(frozen=True) -class _TokenWrapper: - __slots__ = "_token", "__weakref__" - _token: object - - -class _NoValueSet(enum.Enum): - NO_VALUE_SET = enum.auto() - - -class RunvarToken(Generic[T]): - __slots__ = "_var", "_value", "_redeemed" - - def __init__(self, var: RunVar[T], value: T | Literal[_NoValueSet.NO_VALUE_SET]): - self._var = var - self._value: T | Literal[_NoValueSet.NO_VALUE_SET] = value - self._redeemed = False - - -class RunVar(Generic[T]): - """ - Like a :class:`~contextvars.ContextVar`, except scoped to the running event loop. - """ - - __slots__ = "_name", "_default" - - NO_VALUE_SET: Literal[_NoValueSet.NO_VALUE_SET] = _NoValueSet.NO_VALUE_SET - - _token_wrappers: set[_TokenWrapper] = set() - - def __init__( - self, name: str, default: T | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET - ): - self._name = name - self._default = default - - @property - def _current_vars(self) -> dict[str, T]: - token = current_token() - try: - return _run_vars[token] - except KeyError: - run_vars = _run_vars[token] = {} - return run_vars - - @overload - def get(self, default: D) -> T | D: ... - - @overload - def get(self) -> T: ... 
-
-    def get(
-        self, default: D | Literal[_NoValueSet.NO_VALUE_SET] = NO_VALUE_SET
-    ) -> T | D:
-        try:
-            return self._current_vars[self._name]
-        except KeyError:
-            if default is not RunVar.NO_VALUE_SET:
-                return default
-            elif self._default is not RunVar.NO_VALUE_SET:
-                return self._default
-
-        raise LookupError(
-            f'Run variable "{self._name}" has no value and no default set'
-        )
-
-    def set(self, value: T) -> RunvarToken[T]:
-        current_vars = self._current_vars
-        token = RunvarToken(self, current_vars.get(self._name, RunVar.NO_VALUE_SET))
-        current_vars[self._name] = value
-        return token
-
-    def reset(self, token: RunvarToken[T]) -> None:
-        if token._var is not self:
-            raise ValueError("This token does not belong to this RunVar")
-
-        if token._redeemed:
-            raise ValueError("This token has already been used")
-
-        if token._value is _NoValueSet.NO_VALUE_SET:
-            try:
-                del self._current_vars[self._name]
-            except KeyError:
-                pass
-        else:
-            self._current_vars[self._name] = token._value
-
-        token._redeemed = True
-
-    def __repr__(self) -> str:
-        return f"<RunVar name={self._name!r}>"
diff --git a/myenv/Lib/site-packages/anyio/py.typed b/myenv/Lib/site-packages/anyio/py.typed
deleted file mode 100644
index e69de29..0000000
diff --git a/myenv/Lib/site-packages/anyio/pytest_plugin.py b/myenv/Lib/site-packages/anyio/pytest_plugin.py
deleted file mode 100644
index c9fe1bd..0000000
--- a/myenv/Lib/site-packages/anyio/pytest_plugin.py
+++ /dev/null
@@ -1,162 +0,0 @@
-from __future__ import annotations
-
-import sys
-from collections.abc import Iterator
-from contextlib import ExitStack, contextmanager
-from inspect import isasyncgenfunction, iscoroutinefunction
-from typing import Any, cast
-
-import pytest
-import sniffio
-from _pytest.outcomes import Exit
-
-from ._core._eventloop import get_all_backends, get_async_backend
-from ._core._exceptions import iterate_exceptions
-from .abc import TestRunner
-
-if sys.version_info < (3, 11):
-    from exceptiongroup import ExceptionGroup
-
-_current_runner: TestRunner | None = None
-_runner_stack: ExitStack | None = None
-_runner_leases = 0
-
-
-def extract_backend_and_options(backend: object) -> tuple[str, dict[str, Any]]:
-    if isinstance(backend, str):
-        return backend, {}
-    elif isinstance(backend, tuple) and len(backend) == 2:
-        if isinstance(backend[0], str) and isinstance(backend[1], dict):
-            return cast(tuple[str, dict[str, Any]], backend)
-
-    raise TypeError("anyio_backend must be either a string or tuple of (string, dict)")
-
-
-@contextmanager
-def get_runner(
-    backend_name: str, backend_options: dict[str, Any]
-) -> Iterator[TestRunner]:
-    global _current_runner, _runner_leases, _runner_stack
-    if _current_runner is None:
-        asynclib = get_async_backend(backend_name)
-        _runner_stack = ExitStack()
-        if sniffio.current_async_library_cvar.get(None) is None:
-            # Since we're in control of the event loop, we can cache the name of the
-            # async library
-            token = sniffio.current_async_library_cvar.set(backend_name)
-            _runner_stack.callback(sniffio.current_async_library_cvar.reset, token)
-
-        backend_options = backend_options or {}
-        _current_runner = _runner_stack.enter_context(
-            asynclib.create_test_runner(backend_options)
-        )
-
-    _runner_leases += 1
-    try:
-        yield _current_runner
-    finally:
-        _runner_leases -= 1
-        if not _runner_leases:
-            assert _runner_stack is not None
-            _runner_stack.close()
-            _runner_stack = _current_runner = None
-
-
-def pytest_configure(config: Any) -> None:
-    config.addinivalue_line(
-        "markers",
-        "anyio: mark the (coroutine function) test to be run "
"asynchronously via anyio.", - ) - - -def pytest_fixture_setup(fixturedef: Any, request: Any) -> None: - def wrapper(*args, anyio_backend, **kwargs): # type: ignore[no-untyped-def] - backend_name, backend_options = extract_backend_and_options(anyio_backend) - if has_backend_arg: - kwargs["anyio_backend"] = anyio_backend - - with get_runner(backend_name, backend_options) as runner: - if isasyncgenfunction(func): - yield from runner.run_asyncgen_fixture(func, kwargs) - else: - yield runner.run_fixture(func, kwargs) - - # Only apply this to coroutine functions and async generator functions in requests - # that involve the anyio_backend fixture - func = fixturedef.func - if isasyncgenfunction(func) or iscoroutinefunction(func): - if "anyio_backend" in request.fixturenames: - has_backend_arg = "anyio_backend" in fixturedef.argnames - fixturedef.func = wrapper - if not has_backend_arg: - fixturedef.argnames += ("anyio_backend",) - - -@pytest.hookimpl(tryfirst=True) -def pytest_pycollect_makeitem(collector: Any, name: Any, obj: Any) -> None: - if collector.istestfunction(obj, name): - inner_func = obj.hypothesis.inner_test if hasattr(obj, "hypothesis") else obj - if iscoroutinefunction(inner_func): - marker = collector.get_closest_marker("anyio") - own_markers = getattr(obj, "pytestmark", ()) - if marker or any(marker.name == "anyio" for marker in own_markers): - pytest.mark.usefixtures("anyio_backend")(obj) - - -@pytest.hookimpl(tryfirst=True) -def pytest_pyfunc_call(pyfuncitem: Any) -> bool | None: - def run_with_hypothesis(**kwargs: Any) -> None: - with get_runner(backend_name, backend_options) as runner: - runner.run_test(original_func, kwargs) - - backend = pyfuncitem.funcargs.get("anyio_backend") - if backend: - backend_name, backend_options = extract_backend_and_options(backend) - - if hasattr(pyfuncitem.obj, "hypothesis"): - # Wrap the inner test function unless it's already wrapped - original_func = pyfuncitem.obj.hypothesis.inner_test - if original_func.__qualname__ != run_with_hypothesis.__qualname__: - if iscoroutinefunction(original_func): - pyfuncitem.obj.hypothesis.inner_test = run_with_hypothesis - - return None - - if iscoroutinefunction(pyfuncitem.obj): - funcargs = pyfuncitem.funcargs - testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} - with get_runner(backend_name, backend_options) as runner: - try: - runner.run_test(pyfuncitem.obj, testargs) - except ExceptionGroup as excgrp: - for exc in iterate_exceptions(excgrp): - if isinstance(exc, (Exit, KeyboardInterrupt, SystemExit)): - raise exc from excgrp - - raise - - return True - - return None - - -@pytest.fixture(scope="module", params=get_all_backends()) -def anyio_backend(request: Any) -> Any: - return request.param - - -@pytest.fixture -def anyio_backend_name(anyio_backend: Any) -> str: - if isinstance(anyio_backend, str): - return anyio_backend - else: - return anyio_backend[0] - - -@pytest.fixture -def anyio_backend_options(anyio_backend: Any) -> dict[str, Any]: - if isinstance(anyio_backend, str): - return {} - else: - return anyio_backend[1] diff --git a/myenv/Lib/site-packages/anyio/streams/__init__.py b/myenv/Lib/site-packages/anyio/streams/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/anyio/streams/buffered.py b/myenv/Lib/site-packages/anyio/streams/buffered.py deleted file mode 100644 index f5d5e83..0000000 --- a/myenv/Lib/site-packages/anyio/streams/buffered.py +++ /dev/null @@ -1,119 +0,0 @@ -from __future__ import annotations - -from 
collections.abc import Callable, Mapping -from dataclasses import dataclass, field -from typing import Any - -from .. import ClosedResourceError, DelimiterNotFound, EndOfStream, IncompleteRead -from ..abc import AnyByteReceiveStream, ByteReceiveStream - - -@dataclass(eq=False) -class BufferedByteReceiveStream(ByteReceiveStream): - """ - Wraps any bytes-based receive stream and uses a buffer to provide sophisticated - receiving capabilities in the form of a byte stream. - """ - - receive_stream: AnyByteReceiveStream - _buffer: bytearray = field(init=False, default_factory=bytearray) - _closed: bool = field(init=False, default=False) - - async def aclose(self) -> None: - await self.receive_stream.aclose() - self._closed = True - - @property - def buffer(self) -> bytes: - """The bytes currently in the buffer.""" - return bytes(self._buffer) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.receive_stream.extra_attributes - - async def receive(self, max_bytes: int = 65536) -> bytes: - if self._closed: - raise ClosedResourceError - - if self._buffer: - chunk = bytes(self._buffer[:max_bytes]) - del self._buffer[:max_bytes] - return chunk - elif isinstance(self.receive_stream, ByteReceiveStream): - return await self.receive_stream.receive(max_bytes) - else: - # With a bytes-oriented object stream, we need to handle any surplus bytes - # we get from the receive() call - chunk = await self.receive_stream.receive() - if len(chunk) > max_bytes: - # Save the surplus bytes in the buffer - self._buffer.extend(chunk[max_bytes:]) - return chunk[:max_bytes] - else: - return chunk - - async def receive_exactly(self, nbytes: int) -> bytes: - """ - Read exactly the given amount of bytes from the stream. - - :param nbytes: the number of bytes to read - :return: the bytes read - :raises ~anyio.IncompleteRead: if the stream was closed before the requested - amount of bytes could be read from the stream - - """ - while True: - remaining = nbytes - len(self._buffer) - if remaining <= 0: - retval = self._buffer[:nbytes] - del self._buffer[:nbytes] - return bytes(retval) - - try: - if isinstance(self.receive_stream, ByteReceiveStream): - chunk = await self.receive_stream.receive(remaining) - else: - chunk = await self.receive_stream.receive() - except EndOfStream as exc: - raise IncompleteRead from exc - - self._buffer.extend(chunk) - - async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes: - """ - Read from the stream until the delimiter is found or max_bytes have been read. 
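A sketch of parsing a framed protocol with this buffered wrapper; `stream` is assumed to be any connected byte receive stream, e.g. the result of anyio.connect_tcp():

from anyio.streams.buffered import BufferedByteReceiveStream

async def read_frame(stream):
    buffered = BufferedByteReceiveStream(stream)
    header = await buffered.receive_exactly(4)                     # fixed-size prefix
    body = await buffered.receive_until(b"\r\n", max_bytes=65536)  # delimited payload
    return header, body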
- - :param delimiter: the marker to look for in the stream - :param max_bytes: maximum number of bytes that will be read before raising - :exc:`~anyio.DelimiterNotFound` - :return: the bytes read (not including the delimiter) - :raises ~anyio.IncompleteRead: if the stream was closed before the delimiter - was found - :raises ~anyio.DelimiterNotFound: if the delimiter is not found within the - bytes read up to the maximum allowed - - """ - delimiter_size = len(delimiter) - offset = 0 - while True: - # Check if the delimiter can be found in the current buffer - index = self._buffer.find(delimiter, offset) - if index >= 0: - found = self._buffer[:index] - del self._buffer[: index + len(delimiter) :] - return bytes(found) - - # Check if the buffer is already at or over the limit - if len(self._buffer) >= max_bytes: - raise DelimiterNotFound(max_bytes) - - # Read more data into the buffer from the socket - try: - data = await self.receive_stream.receive() - except EndOfStream as exc: - raise IncompleteRead from exc - - # Move the offset forward and add the new data to the buffer - offset = max(len(self._buffer) - delimiter_size + 1, 0) - self._buffer.extend(data) diff --git a/myenv/Lib/site-packages/anyio/streams/file.py b/myenv/Lib/site-packages/anyio/streams/file.py deleted file mode 100644 index f492464..0000000 --- a/myenv/Lib/site-packages/anyio/streams/file.py +++ /dev/null @@ -1,148 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping -from io import SEEK_SET, UnsupportedOperation -from os import PathLike -from pathlib import Path -from typing import Any, BinaryIO, cast - -from .. import ( - BrokenResourceError, - ClosedResourceError, - EndOfStream, - TypedAttributeSet, - to_thread, - typed_attribute, -) -from ..abc import ByteReceiveStream, ByteSendStream - - -class FileStreamAttribute(TypedAttributeSet): - #: the open file descriptor - file: BinaryIO = typed_attribute() - #: the path of the file on the file system, if available (file must be a real file) - path: Path = typed_attribute() - #: the file number, if available (file must be a real file or a TTY) - fileno: int = typed_attribute() - - -class _BaseFileStream: - def __init__(self, file: BinaryIO): - self._file = file - - async def aclose(self) -> None: - await to_thread.run_sync(self._file.close) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - attributes: dict[Any, Callable[[], Any]] = { - FileStreamAttribute.file: lambda: self._file, - } - - if hasattr(self._file, "name"): - attributes[FileStreamAttribute.path] = lambda: Path(self._file.name) - - try: - self._file.fileno() - except UnsupportedOperation: - pass - else: - attributes[FileStreamAttribute.fileno] = lambda: self._file.fileno() - - return attributes - - -class FileReadStream(_BaseFileStream, ByteReceiveStream): - """ - A byte stream that reads from a file in the file system. - - :param file: a file that has been opened for reading in binary mode - - .. versionadded:: 3.0 - """ - - @classmethod - async def from_path(cls, path: str | PathLike[str]) -> FileReadStream: - """ - Create a file read stream by opening the given file. 
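A sketch of copying a file through the byte-stream interface defined in this module:

from anyio.streams.file import FileReadStream, FileWriteStream

async def copy_file(src: str, dst: str) -> None:
    async with await FileReadStream.from_path(src) as source:
        async with await FileWriteStream.from_path(dst) as sink:
            async for chunk in source:   # chunks of at most 65536 bytes
                await sink.send(chunk)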
- - :param path: path of the file to read from - - """ - file = await to_thread.run_sync(Path(path).open, "rb") - return cls(cast(BinaryIO, file)) - - async def receive(self, max_bytes: int = 65536) -> bytes: - try: - data = await to_thread.run_sync(self._file.read, max_bytes) - except ValueError: - raise ClosedResourceError from None - except OSError as exc: - raise BrokenResourceError from exc - - if data: - return data - else: - raise EndOfStream - - async def seek(self, position: int, whence: int = SEEK_SET) -> int: - """ - Seek the file to the given position. - - .. seealso:: :meth:`io.IOBase.seek` - - .. note:: Not all file descriptors are seekable. - - :param position: position to seek the file to - :param whence: controls how ``position`` is interpreted - :return: the new absolute position - :raises OSError: if the file is not seekable - - """ - return await to_thread.run_sync(self._file.seek, position, whence) - - async def tell(self) -> int: - """ - Return the current stream position. - - .. note:: Not all file descriptors are seekable. - - :return: the current absolute position - :raises OSError: if the file is not seekable - - """ - return await to_thread.run_sync(self._file.tell) - - -class FileWriteStream(_BaseFileStream, ByteSendStream): - """ - A byte stream that writes to a file in the file system. - - :param file: a file that has been opened for writing in binary mode - - .. versionadded:: 3.0 - """ - - @classmethod - async def from_path( - cls, path: str | PathLike[str], append: bool = False - ) -> FileWriteStream: - """ - Create a file write stream by opening the given file for writing. - - :param path: path of the file to write to - :param append: if ``True``, open the file for appending; if ``False``, any - existing file at the given path will be truncated - - """ - mode = "ab" if append else "wb" - file = await to_thread.run_sync(Path(path).open, mode) - return cls(cast(BinaryIO, file)) - - async def send(self, item: bytes) -> None: - try: - await to_thread.run_sync(self._file.write, item) - except ValueError: - raise ClosedResourceError from None - except OSError as exc: - raise BrokenResourceError from exc diff --git a/myenv/Lib/site-packages/anyio/streams/memory.py b/myenv/Lib/site-packages/anyio/streams/memory.py deleted file mode 100644 index b547aa6..0000000 --- a/myenv/Lib/site-packages/anyio/streams/memory.py +++ /dev/null @@ -1,317 +0,0 @@ -from __future__ import annotations - -import warnings -from collections import OrderedDict, deque -from dataclasses import dataclass, field -from types import TracebackType -from typing import Generic, NamedTuple, TypeVar - -from .. 
import ( - BrokenResourceError, - ClosedResourceError, - EndOfStream, - WouldBlock, -) -from .._core._testing import TaskInfo, get_current_task -from ..abc import Event, ObjectReceiveStream, ObjectSendStream -from ..lowlevel import checkpoint - -T_Item = TypeVar("T_Item") -T_co = TypeVar("T_co", covariant=True) -T_contra = TypeVar("T_contra", contravariant=True) - - -class MemoryObjectStreamStatistics(NamedTuple): - current_buffer_used: int #: number of items stored in the buffer - #: maximum number of items that can be stored on this stream (or :data:`math.inf`) - max_buffer_size: float - open_send_streams: int #: number of unclosed clones of the send stream - open_receive_streams: int #: number of unclosed clones of the receive stream - #: number of tasks blocked on :meth:`MemoryObjectSendStream.send` - tasks_waiting_send: int - #: number of tasks blocked on :meth:`MemoryObjectReceiveStream.receive` - tasks_waiting_receive: int - - -@dataclass(eq=False) -class MemoryObjectItemReceiver(Generic[T_Item]): - task_info: TaskInfo = field(init=False, default_factory=get_current_task) - item: T_Item = field(init=False) - - def __repr__(self) -> str: - # When item is not defined, we get following error with default __repr__: - # AttributeError: 'MemoryObjectItemReceiver' object has no attribute 'item' - item = getattr(self, "item", None) - return f"{self.__class__.__name__}(task_info={self.task_info}, item={item!r})" - - -@dataclass(eq=False) -class MemoryObjectStreamState(Generic[T_Item]): - max_buffer_size: float = field() - buffer: deque[T_Item] = field(init=False, default_factory=deque) - open_send_channels: int = field(init=False, default=0) - open_receive_channels: int = field(init=False, default=0) - waiting_receivers: OrderedDict[Event, MemoryObjectItemReceiver[T_Item]] = field( - init=False, default_factory=OrderedDict - ) - waiting_senders: OrderedDict[Event, T_Item] = field( - init=False, default_factory=OrderedDict - ) - - def statistics(self) -> MemoryObjectStreamStatistics: - return MemoryObjectStreamStatistics( - len(self.buffer), - self.max_buffer_size, - self.open_send_channels, - self.open_receive_channels, - len(self.waiting_senders), - len(self.waiting_receivers), - ) - - -@dataclass(eq=False) -class MemoryObjectReceiveStream(Generic[T_co], ObjectReceiveStream[T_co]): - _state: MemoryObjectStreamState[T_co] - _closed: bool = field(init=False, default=False) - - def __post_init__(self) -> None: - self._state.open_receive_channels += 1 - - def receive_nowait(self) -> T_co: - """ - Receive the next item if it can be done without waiting. 
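A minimal sketch of a memory stream pair as created by the public factory:

import anyio

async def main():
    send, receive = anyio.create_memory_object_stream(max_buffer_size=4)
    async with send, receive:
        send.send_nowait("hi")            # buffer has room, so no waiting
        print(await receive.receive())    # -> "hi"

anyio.run(main)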
- - :return: the received item - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.EndOfStream: if the buffer is empty and this stream has been - closed from the sending end - :raises ~anyio.WouldBlock: if there are no items in the buffer and no tasks - waiting to send - - """ - if self._closed: - raise ClosedResourceError - - if self._state.waiting_senders: - # Get the item from the next sender - send_event, item = self._state.waiting_senders.popitem(last=False) - self._state.buffer.append(item) - send_event.set() - - if self._state.buffer: - return self._state.buffer.popleft() - elif not self._state.open_send_channels: - raise EndOfStream - - raise WouldBlock - - async def receive(self) -> T_co: - await checkpoint() - try: - return self.receive_nowait() - except WouldBlock: - # Add ourselves in the queue - receive_event = Event() - receiver = MemoryObjectItemReceiver[T_co]() - self._state.waiting_receivers[receive_event] = receiver - - try: - await receive_event.wait() - finally: - self._state.waiting_receivers.pop(receive_event, None) - - try: - return receiver.item - except AttributeError: - raise EndOfStream - - def clone(self) -> MemoryObjectReceiveStream[T_co]: - """ - Create a clone of this receive stream. - - Each clone can be closed separately. Only when all clones have been closed will - the receiving end of the memory stream be considered closed by the sending ends. - - :return: the cloned stream - - """ - if self._closed: - raise ClosedResourceError - - return MemoryObjectReceiveStream(_state=self._state) - - def close(self) -> None: - """ - Close the stream. - - This works the exact same way as :meth:`aclose`, but is provided as a special - case for the benefit of synchronous callbacks. - - """ - if not self._closed: - self._closed = True - self._state.open_receive_channels -= 1 - if self._state.open_receive_channels == 0: - send_events = list(self._state.waiting_senders.keys()) - for event in send_events: - event.set() - - async def aclose(self) -> None: - self.close() - - def statistics(self) -> MemoryObjectStreamStatistics: - """ - Return statistics about the current state of this stream. - - .. versionadded:: 3.0 - """ - return self._state.statistics() - - def __enter__(self) -> MemoryObjectReceiveStream[T_co]: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() - - def __del__(self) -> None: - if not self._closed: - warnings.warn( - f"Unclosed <{self.__class__.__name__} at {id(self):x}>", - ResourceWarning, - source=self, - ) - - -@dataclass(eq=False) -class MemoryObjectSendStream(Generic[T_contra], ObjectSendStream[T_contra]): - _state: MemoryObjectStreamState[T_contra] - _closed: bool = field(init=False, default=False) - - def __post_init__(self) -> None: - self._state.open_send_channels += 1 - - def send_nowait(self, item: T_contra) -> None: - """ - Send an item immediately if it can be done without waiting. 
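A sketch of fan-in through cloned send streams; iteration over the receive side ends only after the original and every clone have been closed:

import anyio

async def produce(send, n):
    async with send:            # closes this clone when done
        await send.send(n)

async def main():
    send, receive = anyio.create_memory_object_stream(max_buffer_size=3)
    async with anyio.create_task_group() as tg:
        for n in range(3):
            tg.start_soon(produce, send.clone(), n)
        send.close()            # the original must be closed as well
        async with receive:
            async for item in receive:
                print(item)

anyio.run(main)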
- - :param item: the item to send - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.BrokenResourceError: if the stream has been closed from the - receiving end - :raises ~anyio.WouldBlock: if the buffer is full and there are no tasks waiting - to receive - - """ - if self._closed: - raise ClosedResourceError - if not self._state.open_receive_channels: - raise BrokenResourceError - - while self._state.waiting_receivers: - receive_event, receiver = self._state.waiting_receivers.popitem(last=False) - if not receiver.task_info.has_pending_cancellation(): - receiver.item = item - receive_event.set() - return - - if len(self._state.buffer) < self._state.max_buffer_size: - self._state.buffer.append(item) - else: - raise WouldBlock - - async def send(self, item: T_contra) -> None: - """ - Send an item to the stream. - - If the buffer is full, this method blocks until there is again room in the - buffer or the item can be sent directly to a receiver. - - :param item: the item to send - :raises ~anyio.ClosedResourceError: if this send stream has been closed - :raises ~anyio.BrokenResourceError: if the stream has been closed from the - receiving end - - """ - await checkpoint() - try: - self.send_nowait(item) - except WouldBlock: - # Wait until there's someone on the receiving end - send_event = Event() - self._state.waiting_senders[send_event] = item - try: - await send_event.wait() - except BaseException: - self._state.waiting_senders.pop(send_event, None) - raise - - if send_event in self._state.waiting_senders: - del self._state.waiting_senders[send_event] - raise BrokenResourceError from None - - def clone(self) -> MemoryObjectSendStream[T_contra]: - """ - Create a clone of this send stream. - - Each clone can be closed separately. Only when all clones have been closed will - the sending end of the memory stream be considered closed by the receiving ends. - - :return: the cloned stream - - """ - if self._closed: - raise ClosedResourceError - - return MemoryObjectSendStream(_state=self._state) - - def close(self) -> None: - """ - Close the stream. - - This works the exact same way as :meth:`aclose`, but is provided as a special - case for the benefit of synchronous callbacks. - - """ - if not self._closed: - self._closed = True - self._state.open_send_channels -= 1 - if self._state.open_send_channels == 0: - receive_events = list(self._state.waiting_receivers.keys()) - self._state.waiting_receivers.clear() - for event in receive_events: - event.set() - - async def aclose(self) -> None: - self.close() - - def statistics(self) -> MemoryObjectStreamStatistics: - """ - Return statistics about the current state of this stream. - - .. 
versionadded:: 3.0 - """ - return self._state.statistics() - - def __enter__(self) -> MemoryObjectSendStream[T_contra]: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() - - def __del__(self) -> None: - if not self._closed: - warnings.warn( - f"Unclosed <{self.__class__.__name__} at {id(self):x}>", - ResourceWarning, - source=self, - ) diff --git a/myenv/Lib/site-packages/anyio/streams/stapled.py b/myenv/Lib/site-packages/anyio/streams/stapled.py deleted file mode 100644 index 80f64a2..0000000 --- a/myenv/Lib/site-packages/anyio/streams/stapled.py +++ /dev/null @@ -1,141 +0,0 @@ -from __future__ import annotations - -from collections.abc import Callable, Mapping, Sequence -from dataclasses import dataclass -from typing import Any, Generic, TypeVar - -from ..abc import ( - ByteReceiveStream, - ByteSendStream, - ByteStream, - Listener, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, - TaskGroup, -) - -T_Item = TypeVar("T_Item") -T_Stream = TypeVar("T_Stream") - - -@dataclass(eq=False) -class StapledByteStream(ByteStream): - """ - Combines two byte streams into a single, bidirectional byte stream. - - Extra attributes will be provided from both streams, with the receive stream - providing the values in case of a conflict. - - :param ByteSendStream send_stream: the sending byte stream - :param ByteReceiveStream receive_stream: the receiving byte stream - """ - - send_stream: ByteSendStream - receive_stream: ByteReceiveStream - - async def receive(self, max_bytes: int = 65536) -> bytes: - return await self.receive_stream.receive(max_bytes) - - async def send(self, item: bytes) -> None: - await self.send_stream.send(item) - - async def send_eof(self) -> None: - await self.send_stream.aclose() - - async def aclose(self) -> None: - await self.send_stream.aclose() - await self.receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.send_stream.extra_attributes, - **self.receive_stream.extra_attributes, - } - - -@dataclass(eq=False) -class StapledObjectStream(Generic[T_Item], ObjectStream[T_Item]): - """ - Combines two object streams into a single, bidirectional object stream. - - Extra attributes will be provided from both streams, with the receive stream - providing the values in case of a conflict. - - :param ObjectSendStream send_stream: the sending object stream - :param ObjectReceiveStream receive_stream: the receiving object stream - """ - - send_stream: ObjectSendStream[T_Item] - receive_stream: ObjectReceiveStream[T_Item] - - async def receive(self) -> T_Item: - return await self.receive_stream.receive() - - async def send(self, item: T_Item) -> None: - await self.send_stream.send(item) - - async def send_eof(self) -> None: - await self.send_stream.aclose() - - async def aclose(self) -> None: - await self.send_stream.aclose() - await self.receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.send_stream.extra_attributes, - **self.receive_stream.extra_attributes, - } - - -@dataclass(eq=False) -class MultiListener(Generic[T_Stream], Listener[T_Stream]): - """ - Combines multiple listeners into one, serving connections from all of them at once. - - Any MultiListeners in the given collection of listeners will have their listeners - moved into this one. 
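A sketch of serving one handler on several transports at once (create_unix_listener assumes a POSIX host; `handler` here is any callable taking the accepted stream):

import anyio
from anyio.streams.stapled import MultiListener

async def serve_everywhere(handler):
    tcp = await anyio.create_tcp_listener(local_port=8000)
    unix = await anyio.create_unix_listener("/tmp/app.sock")
    await MultiListener([tcp, unix]).serve(handler)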
- - Extra attributes are provided from each listener, with each successive listener - overriding any conflicting attributes from the previous one. - - :param listeners: listeners to serve - :type listeners: Sequence[Listener[T_Stream]] - """ - - listeners: Sequence[Listener[T_Stream]] - - def __post_init__(self) -> None: - listeners: list[Listener[T_Stream]] = [] - for listener in self.listeners: - if isinstance(listener, MultiListener): - listeners.extend(listener.listeners) - del listener.listeners[:] # type: ignore[attr-defined] - else: - listeners.append(listener) - - self.listeners = listeners - - async def serve( - self, handler: Callable[[T_Stream], Any], task_group: TaskGroup | None = None - ) -> None: - from .. import create_task_group - - async with create_task_group() as tg: - for listener in self.listeners: - tg.start_soon(listener.serve, handler, task_group) - - async def aclose(self) -> None: - for listener in self.listeners: - await listener.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - attributes: dict = {} - for listener in self.listeners: - attributes.update(listener.extra_attributes) - - return attributes diff --git a/myenv/Lib/site-packages/anyio/streams/text.py b/myenv/Lib/site-packages/anyio/streams/text.py deleted file mode 100644 index f1a1127..0000000 --- a/myenv/Lib/site-packages/anyio/streams/text.py +++ /dev/null @@ -1,147 +0,0 @@ -from __future__ import annotations - -import codecs -from collections.abc import Callable, Mapping -from dataclasses import InitVar, dataclass, field -from typing import Any - -from ..abc import ( - AnyByteReceiveStream, - AnyByteSendStream, - AnyByteStream, - ObjectReceiveStream, - ObjectSendStream, - ObjectStream, -) - - -@dataclass(eq=False) -class TextReceiveStream(ObjectReceiveStream[str]): - """ - Stream wrapper that decodes bytes to strings using the given encoding. - - Decoding is done using :class:`~codecs.IncrementalDecoder` which returns any - completely received unicode characters as soon as they come in. - - :param transport_stream: any bytes-based receive stream - :param encoding: character encoding to use for decoding bytes to strings (defaults - to ``utf-8``) - :param errors: handling scheme for decoding errors (defaults to ``strict``; see the - `codecs module documentation`_ for a comprehensive list of options) - - .. _codecs module documentation: - https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteReceiveStream - encoding: InitVar[str] = "utf-8" - errors: InitVar[str] = "strict" - _decoder: codecs.IncrementalDecoder = field(init=False) - - def __post_init__(self, encoding: str, errors: str) -> None: - decoder_class = codecs.getincrementaldecoder(encoding) - self._decoder = decoder_class(errors=errors) - - async def receive(self) -> str: - while True: - chunk = await self.transport_stream.receive() - decoded = self._decoder.decode(chunk) - if decoded: - return decoded - - async def aclose(self) -> None: - await self.transport_stream.aclose() - self._decoder.reset() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.transport_stream.extra_attributes - - -@dataclass(eq=False) -class TextSendStream(ObjectSendStream[str]): - """ - Sends strings to the wrapped stream as bytes using the given encoding. 
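A sketch of exchanging text over any bidirectional byte stream (for example an anyio SocketStream), using the combined wrapper from this module:

from anyio.streams.text import TextStream

async def exchange_greeting(byte_stream):
    text = TextStream(byte_stream, encoding="utf-8", errors="replace")
    await text.send("hello\n")      # encoded to bytes on the way out
    return await text.receive()     # decoded back to str on the way in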
- - :param AnyByteSendStream transport_stream: any bytes-based send stream - :param str encoding: character encoding to use for encoding strings to bytes - (defaults to ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see - the `codecs module documentation`_ for a comprehensive list of options) - - .. _codecs module documentation: - https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteSendStream - encoding: InitVar[str] = "utf-8" - errors: str = "strict" - _encoder: Callable[..., tuple[bytes, int]] = field(init=False) - - def __post_init__(self, encoding: str) -> None: - self._encoder = codecs.getencoder(encoding) - - async def send(self, item: str) -> None: - encoded = self._encoder(item, self.errors)[0] - await self.transport_stream.send(encoded) - - async def aclose(self) -> None: - await self.transport_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return self.transport_stream.extra_attributes - - -@dataclass(eq=False) -class TextStream(ObjectStream[str]): - """ - A bidirectional stream that decodes bytes to strings on receive and encodes strings - to bytes on send. - - Extra attributes will be provided from both streams, with the receive stream - providing the values in case of a conflict. - - :param AnyByteStream transport_stream: any bytes-based stream - :param str encoding: character encoding to use for encoding/decoding strings to/from - bytes (defaults to ``utf-8``) - :param str errors: handling scheme for encoding errors (defaults to ``strict``; see - the `codecs module documentation`_ for a comprehensive list of options) - - .. _codecs module documentation: - https://docs.python.org/3/library/codecs.html#codec-objects - """ - - transport_stream: AnyByteStream - encoding: InitVar[str] = "utf-8" - errors: InitVar[str] = "strict" - _receive_stream: TextReceiveStream = field(init=False) - _send_stream: TextSendStream = field(init=False) - - def __post_init__(self, encoding: str, errors: str) -> None: - self._receive_stream = TextReceiveStream( - self.transport_stream, encoding=encoding, errors=errors - ) - self._send_stream = TextSendStream( - self.transport_stream, encoding=encoding, errors=errors - ) - - async def receive(self) -> str: - return await self._receive_stream.receive() - - async def send(self, item: str) -> None: - await self._send_stream.send(item) - - async def send_eof(self) -> None: - await self.transport_stream.send_eof() - - async def aclose(self) -> None: - await self._send_stream.aclose() - await self._receive_stream.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self._send_stream.extra_attributes, - **self._receive_stream.extra_attributes, - } diff --git a/myenv/Lib/site-packages/anyio/streams/tls.py b/myenv/Lib/site-packages/anyio/streams/tls.py deleted file mode 100644 index 83240b4..0000000 --- a/myenv/Lib/site-packages/anyio/streams/tls.py +++ /dev/null @@ -1,338 +0,0 @@ -from __future__ import annotations - -import logging -import re -import ssl -import sys -from collections.abc import Callable, Mapping -from dataclasses import dataclass -from functools import wraps -from typing import Any, TypeVar - -from .. 
import ( - BrokenResourceError, - EndOfStream, - aclose_forcefully, - get_cancelled_exc_class, -) -from .._core._typedattr import TypedAttributeSet, typed_attribute -from ..abc import AnyByteStream, ByteStream, Listener, TaskGroup - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") -_PCTRTT = tuple[tuple[str, str], ...] -_PCTRTTT = tuple[_PCTRTT, ...] - - -class TLSAttribute(TypedAttributeSet): - """Contains Transport Layer Security related attributes.""" - - #: the selected ALPN protocol - alpn_protocol: str | None = typed_attribute() - #: the channel binding for type ``tls-unique`` - channel_binding_tls_unique: bytes = typed_attribute() - #: the selected cipher - cipher: tuple[str, str, int] = typed_attribute() - #: the peer certificate in dictionary form (see :meth:`ssl.SSLSocket.getpeercert` - # for more information) - peer_certificate: None | (dict[str, str | _PCTRTTT | _PCTRTT]) = typed_attribute() - #: the peer certificate in binary form - peer_certificate_binary: bytes | None = typed_attribute() - #: ``True`` if this is the server side of the connection - server_side: bool = typed_attribute() - #: ciphers shared by the client during the TLS handshake (``None`` if this is the - #: client side) - shared_ciphers: list[tuple[str, str, int]] | None = typed_attribute() - #: the :class:`~ssl.SSLObject` used for encryption - ssl_object: ssl.SSLObject = typed_attribute() - #: ``True`` if this stream does (and expects) a closing TLS handshake when the - #: stream is being closed - standard_compatible: bool = typed_attribute() - #: the TLS protocol version (e.g. ``TLSv1.2``) - tls_version: str = typed_attribute() - - -@dataclass(eq=False) -class TLSStream(ByteStream): - """ - A stream wrapper that encrypts all sent data and decrypts received data. - - This class has no public initializer; use :meth:`wrap` instead. - All extra attributes from :class:`~TLSAttribute` are supported. - - :var AnyByteStream transport_stream: the wrapped stream - - """ - - transport_stream: AnyByteStream - standard_compatible: bool - _ssl_object: ssl.SSLObject - _read_bio: ssl.MemoryBIO - _write_bio: ssl.MemoryBIO - - @classmethod - async def wrap( - cls, - transport_stream: AnyByteStream, - *, - server_side: bool | None = None, - hostname: str | None = None, - ssl_context: ssl.SSLContext | None = None, - standard_compatible: bool = True, - ) -> TLSStream: - """ - Wrap an existing stream with Transport Layer Security. - - This performs a TLS handshake with the peer. - - :param transport_stream: a bytes-transporting stream to wrap - :param server_side: ``True`` if this is the server side of the connection, - ``False`` if this is the client side (if omitted, will be set to ``False`` - if ``hostname`` has been provided, ``False`` otherwise). Used only to create - a default context when an explicit context has not been provided. 
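A client-side sketch of upgrading a plain TCP connection; passing hostname enables certificate checking against the default context:

import anyio
from anyio.streams.tls import TLSStream

async def main():
    tcp = await anyio.connect_tcp("example.com", 443)
    tls = await TLSStream.wrap(tcp, hostname="example.com")
    await tls.send(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
    print(await tls.receive())
    await tls.aclose()

anyio.run(main)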
- :param hostname: host name of the peer (if host name checking is desired) - :param ssl_context: the SSLContext object to use (if not provided, a secure - default will be created) - :param standard_compatible: if ``False``, skip the closing handshake when - closing the connection, and don't raise an exception if the peer does the - same - :raises ~ssl.SSLError: if the TLS handshake fails - - """ - if server_side is None: - server_side = not hostname - - if not ssl_context: - purpose = ( - ssl.Purpose.CLIENT_AUTH if server_side else ssl.Purpose.SERVER_AUTH - ) - ssl_context = ssl.create_default_context(purpose) - - # Re-enable detection of unexpected EOFs if it was disabled by Python - if hasattr(ssl, "OP_IGNORE_UNEXPECTED_EOF"): - ssl_context.options &= ~ssl.OP_IGNORE_UNEXPECTED_EOF - - bio_in = ssl.MemoryBIO() - bio_out = ssl.MemoryBIO() - ssl_object = ssl_context.wrap_bio( - bio_in, bio_out, server_side=server_side, server_hostname=hostname - ) - wrapper = cls( - transport_stream=transport_stream, - standard_compatible=standard_compatible, - _ssl_object=ssl_object, - _read_bio=bio_in, - _write_bio=bio_out, - ) - await wrapper._call_sslobject_method(ssl_object.do_handshake) - return wrapper - - async def _call_sslobject_method( - self, func: Callable[[Unpack[PosArgsT]], T_Retval], *args: Unpack[PosArgsT] - ) -> T_Retval: - while True: - try: - result = func(*args) - except ssl.SSLWantReadError: - try: - # Flush any pending writes first - if self._write_bio.pending: - await self.transport_stream.send(self._write_bio.read()) - - data = await self.transport_stream.receive() - except EndOfStream: - self._read_bio.write_eof() - except OSError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - raise BrokenResourceError from exc - else: - self._read_bio.write(data) - except ssl.SSLWantWriteError: - await self.transport_stream.send(self._write_bio.read()) - except ssl.SSLSyscallError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - raise BrokenResourceError from exc - except ssl.SSLError as exc: - self._read_bio.write_eof() - self._write_bio.write_eof() - if ( - isinstance(exc, ssl.SSLEOFError) - or "UNEXPECTED_EOF_WHILE_READING" in exc.strerror - ): - if self.standard_compatible: - raise BrokenResourceError from exc - else: - raise EndOfStream from None - - raise - else: - # Flush any pending writes first - if self._write_bio.pending: - await self.transport_stream.send(self._write_bio.read()) - - return result - - async def unwrap(self) -> tuple[AnyByteStream, bytes]: - """ - Does the TLS closing handshake. 
- - :return: a tuple of (wrapped byte stream, bytes left in the read buffer) - - """ - await self._call_sslobject_method(self._ssl_object.unwrap) - self._read_bio.write_eof() - self._write_bio.write_eof() - return self.transport_stream, self._read_bio.read() - - async def aclose(self) -> None: - if self.standard_compatible: - try: - await self.unwrap() - except BaseException: - await aclose_forcefully(self.transport_stream) - raise - - await self.transport_stream.aclose() - - async def receive(self, max_bytes: int = 65536) -> bytes: - data = await self._call_sslobject_method(self._ssl_object.read, max_bytes) - if not data: - raise EndOfStream - - return data - - async def send(self, item: bytes) -> None: - await self._call_sslobject_method(self._ssl_object.write, item) - - async def send_eof(self) -> None: - tls_version = self.extra(TLSAttribute.tls_version) - match = re.match(r"TLSv(\d+)(?:\.(\d+))?", tls_version) - if match: - major, minor = int(match.group(1)), int(match.group(2) or 0) - if (major, minor) < (1, 3): - raise NotImplementedError( - f"send_eof() requires at least TLSv1.3; current " - f"session uses {tls_version}" - ) - - raise NotImplementedError( - "send_eof() has not yet been implemented for TLS streams" - ) - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - **self.transport_stream.extra_attributes, - TLSAttribute.alpn_protocol: self._ssl_object.selected_alpn_protocol, - TLSAttribute.channel_binding_tls_unique: ( - self._ssl_object.get_channel_binding - ), - TLSAttribute.cipher: self._ssl_object.cipher, - TLSAttribute.peer_certificate: lambda: self._ssl_object.getpeercert(False), - TLSAttribute.peer_certificate_binary: lambda: self._ssl_object.getpeercert( - True - ), - TLSAttribute.server_side: lambda: self._ssl_object.server_side, - TLSAttribute.shared_ciphers: lambda: self._ssl_object.shared_ciphers() - if self._ssl_object.server_side - else None, - TLSAttribute.standard_compatible: lambda: self.standard_compatible, - TLSAttribute.ssl_object: lambda: self._ssl_object, - TLSAttribute.tls_version: self._ssl_object.version, - } - - -@dataclass(eq=False) -class TLSListener(Listener[TLSStream]): - """ - A convenience listener that wraps another listener and auto-negotiates a TLS session - on every accepted connection. - - If the TLS handshake times out or raises an exception, - :meth:`handle_handshake_error` is called to do whatever post-mortem processing is - deemed necessary. - - Supports only the :attr:`~TLSAttribute.standard_compatible` extra attribute. - - :param Listener listener: the listener to wrap - :param ssl_context: the SSL context object - :param standard_compatible: a flag passed through to :meth:`TLSStream.wrap` - :param handshake_timeout: time limit for the TLS handshake - (passed to :func:`~anyio.fail_after`) - """ - - listener: Listener[Any] - ssl_context: ssl.SSLContext - standard_compatible: bool = True - handshake_timeout: float = 30 - - @staticmethod - async def handle_handshake_error(exc: BaseException, stream: AnyByteStream) -> None: - """ - Handle an exception raised during the TLS handshake. - - This method does 3 things: - - #. Forcefully closes the original stream - #. Logs the exception (unless it was a cancellation exception) using the - ``anyio.streams.tls`` logger - #. 
Reraises the exception if it was a base exception or a cancellation exception - - :param exc: the exception - :param stream: the original stream - - """ - await aclose_forcefully(stream) - - # Log all except cancellation exceptions - if not isinstance(exc, get_cancelled_exc_class()): - # CPython (as of 3.11.5) returns incorrect `sys.exc_info()` here when using - # any asyncio implementation, so we explicitly pass the exception to log - # (https://github.com/python/cpython/issues/108668). Trio does not have this - # issue because it works around the CPython bug. - logging.getLogger(__name__).exception( - "Error during TLS handshake", exc_info=exc - ) - - # Only reraise base exceptions and cancellation exceptions - if not isinstance(exc, Exception) or isinstance(exc, get_cancelled_exc_class()): - raise - - async def serve( - self, - handler: Callable[[TLSStream], Any], - task_group: TaskGroup | None = None, - ) -> None: - @wraps(handler) - async def handler_wrapper(stream: AnyByteStream) -> None: - from .. import fail_after - - try: - with fail_after(self.handshake_timeout): - wrapped_stream = await TLSStream.wrap( - stream, - ssl_context=self.ssl_context, - standard_compatible=self.standard_compatible, - ) - except BaseException as exc: - await self.handle_handshake_error(exc, stream) - else: - await handler(wrapped_stream) - - await self.listener.serve(handler_wrapper, task_group) - - async def aclose(self) -> None: - await self.listener.aclose() - - @property - def extra_attributes(self) -> Mapping[Any, Callable[[], Any]]: - return { - TLSAttribute.standard_compatible: lambda: self.standard_compatible, - } diff --git a/myenv/Lib/site-packages/anyio/to_process.py b/myenv/Lib/site-packages/anyio/to_process.py deleted file mode 100644 index 5050dee..0000000 --- a/myenv/Lib/site-packages/anyio/to_process.py +++ /dev/null @@ -1,258 +0,0 @@ -from __future__ import annotations - -import os -import pickle -import subprocess -import sys -from collections import deque -from collections.abc import Callable -from importlib.util import module_from_spec, spec_from_file_location -from typing import TypeVar, cast - -from ._core._eventloop import current_time, get_async_backend, get_cancelled_exc_class -from ._core._exceptions import BrokenWorkerProcess -from ._core._subprocesses import open_process -from ._core._synchronization import CapacityLimiter -from ._core._tasks import CancelScope, fail_after -from .abc import ByteReceiveStream, ByteSendStream, Process -from .lowlevel import RunVar, checkpoint_if_cancelled -from .streams.buffered import BufferedByteReceiveStream - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -WORKER_MAX_IDLE_TIME = 300 # 5 minutes - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") - -_process_pool_workers: RunVar[set[Process]] = RunVar("_process_pool_workers") -_process_pool_idle_workers: RunVar[deque[tuple[Process, float]]] = RunVar( - "_process_pool_idle_workers" -) -_default_process_limiter: RunVar[CapacityLimiter] = RunVar("_default_process_limiter") - - -async def run_sync( - func: Callable[[Unpack[PosArgsT]], T_Retval], - *args: Unpack[PosArgsT], - cancellable: bool = False, - limiter: CapacityLimiter | None = None, -) -> T_Retval: - """ - Call the given function with the given arguments in a worker process. 
- - If the ``cancellable`` option is enabled and the task waiting for its completion is - cancelled, the worker process running it will be abruptly terminated using SIGKILL - (or ``terminateProcess()`` on Windows). - - :param func: a callable - :param args: positional arguments for the callable - :param cancellable: ``True`` to allow cancellation of the operation while it's - running - :param limiter: capacity limiter to use to limit the total amount of processes - running (if omitted, the default limiter is used) - :return: an awaitable that yields the return value of the function. - - """ - - async def send_raw_command(pickled_cmd: bytes) -> object: - try: - await stdin.send(pickled_cmd) - response = await buffered.receive_until(b"\n", 50) - status, length = response.split(b" ") - if status not in (b"RETURN", b"EXCEPTION"): - raise RuntimeError( - f"Worker process returned unexpected response: {response!r}" - ) - - pickled_response = await buffered.receive_exactly(int(length)) - except BaseException as exc: - workers.discard(process) - try: - process.kill() - with CancelScope(shield=True): - await process.aclose() - except ProcessLookupError: - pass - - if isinstance(exc, get_cancelled_exc_class()): - raise - else: - raise BrokenWorkerProcess from exc - - retval = pickle.loads(pickled_response) - if status == b"EXCEPTION": - assert isinstance(retval, BaseException) - raise retval - else: - return retval - - # First pickle the request before trying to reserve a worker process - await checkpoint_if_cancelled() - request = pickle.dumps(("run", func, args), protocol=pickle.HIGHEST_PROTOCOL) - - # If this is the first run in this event loop thread, set up the necessary variables - try: - workers = _process_pool_workers.get() - idle_workers = _process_pool_idle_workers.get() - except LookupError: - workers = set() - idle_workers = deque() - _process_pool_workers.set(workers) - _process_pool_idle_workers.set(idle_workers) - get_async_backend().setup_process_pool_exit_at_shutdown(workers) - - async with limiter or current_default_process_limiter(): - # Pop processes from the pool (starting from the most recently used) until we - # find one that hasn't exited yet - process: Process - while idle_workers: - process, idle_since = idle_workers.pop() - if process.returncode is None: - stdin = cast(ByteSendStream, process.stdin) - buffered = BufferedByteReceiveStream( - cast(ByteReceiveStream, process.stdout) - ) - - # Prune any other workers that have been idle for WORKER_MAX_IDLE_TIME - # seconds or longer - now = current_time() - killed_processes: list[Process] = [] - while idle_workers: - if now - idle_workers[0][1] < WORKER_MAX_IDLE_TIME: - break - - process_to_kill, idle_since = idle_workers.popleft() - process_to_kill.kill() - workers.remove(process_to_kill) - killed_processes.append(process_to_kill) - - with CancelScope(shield=True): - for killed_process in killed_processes: - await killed_process.aclose() - - break - - workers.remove(process) - else: - command = [sys.executable, "-u", "-m", __name__] - process = await open_process( - command, stdin=subprocess.PIPE, stdout=subprocess.PIPE - ) - try: - stdin = cast(ByteSendStream, process.stdin) - buffered = BufferedByteReceiveStream( - cast(ByteReceiveStream, process.stdout) - ) - with fail_after(20): - message = await buffered.receive(6) - - if message != b"READY\n": - raise BrokenWorkerProcess( - f"Worker process returned unexpected response: {message!r}" - ) - - main_module_path = getattr(sys.modules["__main__"], "__file__", None) - pickled 
= pickle.dumps( - ("init", sys.path, main_module_path), - protocol=pickle.HIGHEST_PROTOCOL, - ) - await send_raw_command(pickled) - except (BrokenWorkerProcess, get_cancelled_exc_class()): - raise - except BaseException as exc: - process.kill() - raise BrokenWorkerProcess( - "Error during worker process initialization" - ) from exc - - workers.add(process) - - with CancelScope(shield=not cancellable): - try: - return cast(T_Retval, await send_raw_command(request)) - finally: - if process in workers: - idle_workers.append((process, current_time())) - - -def current_default_process_limiter() -> CapacityLimiter: - """ - Return the capacity limiter that is used by default to limit the number of worker - processes. - - :return: a capacity limiter object - - """ - try: - return _default_process_limiter.get() - except LookupError: - limiter = CapacityLimiter(os.cpu_count() or 2) - _default_process_limiter.set(limiter) - return limiter - - -def process_worker() -> None: - # Redirect standard streams to os.devnull so that user code won't interfere with the - # parent-worker communication - stdin = sys.stdin - stdout = sys.stdout - sys.stdin = open(os.devnull) - sys.stdout = open(os.devnull, "w") - - stdout.buffer.write(b"READY\n") - while True: - retval = exception = None - try: - command, *args = pickle.load(stdin.buffer) - except EOFError: - return - except BaseException as exc: - exception = exc - else: - if command == "run": - func, args = args - try: - retval = func(*args) - except BaseException as exc: - exception = exc - elif command == "init": - main_module_path: str | None - sys.path, main_module_path = args - del sys.modules["__main__"] - if main_module_path and os.path.isfile(main_module_path): - # Load the parent's main module but as __mp_main__ instead of - # __main__ (like multiprocessing does) to avoid infinite recursion - try: - spec = spec_from_file_location("__mp_main__", main_module_path) - if spec and spec.loader: - main = module_from_spec(spec) - spec.loader.exec_module(main) - sys.modules["__main__"] = main - except BaseException as exc: - exception = exc - try: - if exception is not None: - status = b"EXCEPTION" - pickled = pickle.dumps(exception, pickle.HIGHEST_PROTOCOL) - else: - status = b"RETURN" - pickled = pickle.dumps(retval, pickle.HIGHEST_PROTOCOL) - except BaseException as exc: - exception = exc - status = b"EXCEPTION" - pickled = pickle.dumps(exc, pickle.HIGHEST_PROTOCOL) - - stdout.buffer.write(b"%s %d\n" % (status, len(pickled))) - stdout.buffer.write(pickled) - - # Respect SIGTERM - if isinstance(exception, SystemExit): - raise exception - - -if __name__ == "__main__": - process_worker() diff --git a/myenv/Lib/site-packages/anyio/to_thread.py b/myenv/Lib/site-packages/anyio/to_thread.py deleted file mode 100644 index 5070516..0000000 --- a/myenv/Lib/site-packages/anyio/to_thread.py +++ /dev/null @@ -1,69 +0,0 @@ -from __future__ import annotations - -import sys -from collections.abc import Callable -from typing import TypeVar -from warnings import warn - -from ._core._eventloop import get_async_backend -from .abc import CapacityLimiter - -if sys.version_info >= (3, 11): - from typing import TypeVarTuple, Unpack -else: - from typing_extensions import TypeVarTuple, Unpack - -T_Retval = TypeVar("T_Retval") -PosArgsT = TypeVarTuple("PosArgsT") - - -async def run_sync( - func: Callable[[Unpack[PosArgsT]], T_Retval], - *args: Unpack[PosArgsT], - abandon_on_cancel: bool = False, - cancellable: bool | None = None, - limiter: CapacityLimiter | None = None, -) -> 
T_Retval:
-    """
-    Call the given function with the given arguments in a worker thread.
-
-    If the ``cancellable`` option is enabled and the task waiting for its completion is
-    cancelled, the thread will still run its course but its return value (or any raised
-    exception) will be ignored.
-
-    :param func: a callable
-    :param args: positional arguments for the callable
-    :param abandon_on_cancel: ``True`` to abandon the thread (leaving it to run
-        unchecked on its own) if the host task is cancelled, ``False`` to ignore
-        cancellations in the host task until the operation has completed in the worker
-        thread
-    :param cancellable: deprecated alias of ``abandon_on_cancel``; will override
-        ``abandon_on_cancel`` if both parameters are passed
-    :param limiter: capacity limiter to use to limit the total amount of threads running
-        (if omitted, the default limiter is used)
-    :return: an awaitable that yields the return value of the function.
-
-    """
-    if cancellable is not None:
-        abandon_on_cancel = cancellable
-        warn(
-            "The `cancellable=` keyword argument to `anyio.to_thread.run_sync` is "
-            "deprecated since AnyIO 4.1.0; use `abandon_on_cancel=` instead",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-
-    return await get_async_backend().run_sync_in_worker_thread(
-        func, args, abandon_on_cancel=abandon_on_cancel, limiter=limiter
-    )
-
-
-def current_default_thread_limiter() -> CapacityLimiter:
-    """
-    Return the capacity limiter that is used by default to limit the number of
-    concurrent threads.
-
-    :return: a capacity limiter object
-
-    """
-    return get_async_backend().current_default_thread_limiter()
diff --git a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/INSTALLER b/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/INSTALLER
deleted file mode 100644
index a1b589e..0000000
--- a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/LICENSE b/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/LICENSE
deleted file mode 100644
index 11069ed..0000000
--- a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-   "License" shall mean the terms and conditions for use, reproduction,
-   and distribution as defined by Sections 1 through 9 of this document.
-
-   "Licensor" shall mean the copyright owner or entity authorized by
-   the copyright owner that is granting the License.
-
-   "Legal Entity" shall mean the union of the acting entity and all
-   other entities that control, are controlled by, or are under common
-   control with that entity. For the purposes of this definition,
-   "control" means (i) the power, direct or indirect, to cause the
-   direction or management of such entity, whether by contract or
-   otherwise, or (ii) ownership of fifty percent (50%) or more of the
-   outstanding shares, or (iii) beneficial ownership of such entity.
-
-   "You" (or "Your") shall mean an individual or Legal Entity
-   exercising permissions granted by this License.
-
-   "Source" form shall mean the preferred form for making modifications,
-   including but not limited to software source code, documentation
-   source, and configuration files.
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - -2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - -3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - -4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - -5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - -6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - -7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - -8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS - -APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - -Copyright [yyyy] [name of copyright owner] - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. 
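The two ``run_sync`` helpers deleted above share one calling convention: the blocking callable and its positional arguments are handed over, and the call is awaited. A minimal sketch of how the thread and process variants differ in practice, assuming ``anyio`` is installed; ``blocking_io`` and ``cpu_bound`` are hypothetical stand-ins:

.. code:: python

    import time

    import anyio
    from anyio import to_process, to_thread


    def blocking_io() -> str:
        # Hypothetical stand-in for a blocking call (file or socket I/O).
        time.sleep(0.5)
        return "io done"


    def cpu_bound(n: int) -> int:
        # Hypothetical stand-in for CPU-heavy work; must be a picklable,
        # module-level callable so it can be shipped to a worker process.
        return sum(i * i for i in range(n))


    async def main() -> None:
        # Blocking I/O goes to a worker thread, capped by the default
        # limiter from current_default_thread_limiter().
        print(await to_thread.run_sync(blocking_io))

        # CPU-bound work goes to a pooled worker process; the callable,
        # its arguments, and the result all travel through pickle.
        print(await to_process.run_sync(cpu_bound, 1_000_000))


    if __name__ == "__main__":  # lets worker processes re-import this module safely
        anyio.run(main)

Note that closures and lambdas are fine for ``to_thread.run_sync`` but not for ``to_process.run_sync``, since the latter pickles the callable before sending it to the worker.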
diff --git a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/METADATA b/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/METADATA deleted file mode 100644 index cdb9a9c..0000000 --- a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/METADATA +++ /dev/null @@ -1,314 +0,0 @@ -Metadata-Version: 2.1 -Name: bcrypt -Version: 4.2.0 -Summary: Modern password hashing for your software and your servers -Author-email: The Python Cryptographic Authority developers -License: Apache-2.0 -Project-URL: homepage, https://github.com/pyca/bcrypt/ -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -Provides-Extra: tests -Requires-Dist: pytest !=3.3.0,>=3.2.1 ; extra == 'tests' -Provides-Extra: typecheck -Requires-Dist: mypy ; extra == 'typecheck' - -bcrypt -====== - -.. image:: https://img.shields.io/pypi/v/bcrypt.svg - :target: https://pypi.org/project/bcrypt/ - :alt: Latest Version - -.. image:: https://github.com/pyca/bcrypt/workflows/CI/badge.svg?branch=main - :target: https://github.com/pyca/bcrypt/actions?query=workflow%3ACI+branch%3Amain - -Acceptable password hashing for your software and your servers (but you should -really use argon2id or scrypt) - - -Installation -============ - -To install bcrypt, simply: - -.. code:: bash - - $ pip install bcrypt - -Note that bcrypt should build very easily on Linux provided you have a C -compiler and a Rust compiler (the minimum supported Rust version is 1.56.0). - -For Debian and Ubuntu, the following command will ensure that the required dependencies are installed: - -.. code:: bash - - $ sudo apt-get install build-essential cargo - -For Fedora and RHEL-derivatives, the following command will ensure that the required dependencies are installed: - -.. code:: bash - - $ sudo yum install gcc cargo - -For Alpine, the following command will ensure that the required dependencies are installed: - -.. code:: bash - - $ apk add --update musl-dev gcc cargo - - -Alternatives -============ - -While bcrypt remains an acceptable choice for password storage, depending on your specific use case you may also want to consider using scrypt (either via `standard library`_ or `cryptography`_) or argon2id via `argon2_cffi`_. - -Changelog -========= - -4.2.0 ------ - -* Bump Rust dependency versions -* Removed the ``BCRYPT_ALLOW_RUST_163`` environment variable. - -4.1.3 ------ - -* Bump Rust dependency versions - -4.1.2 ------ - -* Publish both ``py37`` and ``py39`` wheels. This should resolve some errors - relating to initializing a module multiple times per process. - -4.1.1 ------ - -* Fixed the type signature on the ``kdf`` method. -* Fixed packaging bug on Windows. -* Fixed incompatibility with passlib package detection assumptions. - -4.1.0 ------ - -* Dropped support for Python 3.6. -* Bumped MSRV to 1.64. 
(Note: Rust 1.63 can be used by setting the ``BCRYPT_ALLOW_RUST_163`` environment variable)
-
-4.0.1
------
-
-* We now build PyPy ``manylinux`` wheels.
-* Fixed a bug where passing an invalid ``salt`` to ``checkpw`` could result in
-  a ``pyo3_runtime.PanicException``. It now correctly raises a ``ValueError``.
-
-4.0.0
------
-
-* ``bcrypt`` is now implemented in Rust. Users building from source will need
-  to have a Rust compiler available. Nothing will change for users downloading
-  wheels.
-* We no longer ship ``manylinux2010`` wheels. Users should upgrade to the latest
-  ``pip`` to ensure this doesn’t cause issues downloading wheels on their
-  platform. We now ship ``manylinux_2_28`` wheels for users on new enough platforms.
-* ``NUL`` bytes are now allowed in inputs.
-
-
-3.2.2
------
-
-* Fixed packaging of ``py.typed`` files in wheels so that ``mypy`` works.
-
-3.2.1
------
-
-* Added support for compilation on z/OS
-* The next release of ``bcrypt`` will be 4.0 and it will require Rust at
-  compile time, for users building from source. There will be no additional
-  requirement for users who are installing from wheels. Users on most
-  platforms will be able to obtain a wheel by making sure they have an up to
-  date ``pip``. The minimum supported Rust version will be 1.56.0.
-* This will be the final release for which we ship ``manylinux2010`` wheels.
-  Going forward the minimum supported manylinux ABI for our wheels will be
-  ``manylinux2014``. The vast majority of users will continue to receive
-  ``manylinux`` wheels provided they have an up to date ``pip``.
-
-
-3.2.0
------
-
-* Added typehints for library functions.
-* Dropped support for Python versions less than 3.6 (2.7, 3.4, 3.5).
-* Shipped ``abi3`` Windows wheels (requires pip >= 20).
-
-3.1.7
------
-
-* Set a ``setuptools`` lower bound for PEP517 wheel building.
-* We no longer distribute 32-bit ``manylinux1`` wheels. Continuing to produce
-  them was a maintenance burden.
-
-3.1.6
------
-
-* Added support for compilation on Haiku.
-
-3.1.5
------
-
-* Added support for compilation on AIX.
-* Dropped Python 2.6 and 3.3 support.
-* Switched to using ``abi3`` wheels for Python 3. If you are not getting a
-  wheel on a compatible platform please upgrade your ``pip`` version.
-
-3.1.4
------
-
-* Fixed compilation with mingw and on illumos.
-
-3.1.3
------
-* Fixed a compilation issue on Solaris.
-* Added a warning when using too few rounds with ``kdf``.
-
-3.1.2
------
-* Fixed a compile issue affecting big endian platforms.
-* Fixed invalid escape sequence warnings on Python 3.6.
-* Fixed building in non-UTF8 environments on Python 2.
-
-3.1.1
------
-* Resolved a ``UserWarning`` when used with ``cffi`` 1.8.3.
-
-3.1.0
------
-* Added support for ``checkpw``, a convenience method for verifying a password.
-* Ensure that you get a ``$2y$`` hash when you input a ``$2y$`` salt.
-* Fixed a regression where ``$2a`` hashes were vulnerable to a wraparound bug.
-* Fixed compilation under Alpine Linux.
-
-3.0.0
------
-* Switched the C backend to code obtained from the OpenBSD project rather than
-  openwall.
-* Added support for ``bcrypt_pbkdf`` via the ``kdf`` function.
-
-2.0.0
------
-* Added support for an adjustable prefix when calling ``gensalt``.
-* Switched to CFFI 1.0+
-
-Usage
------
-
-Password Hashing
-~~~~~~~~~~~~~~~~
-
-Hashing and then later checking that a password matches the previous hashed
-password is very simple:
-
-.. code:: pycon
-
-    >>> import bcrypt
-    >>> password = b"super secret password"
-    >>> # Hash a password for the first time, with a randomly-generated salt
-    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt())
-    >>> # Check that an unhashed password matches one that has previously been
-    >>> # hashed
-    >>> if bcrypt.checkpw(password, hashed):
-    ...     print("It Matches!")
-    ... else:
-    ...     print("It Does not Match :(")
-
-KDF
-~~~
-
-As of 3.0.0 ``bcrypt`` now offers a ``kdf`` function which does ``bcrypt_pbkdf``.
-This KDF is used in OpenSSH's newer encrypted private key format.
-
-.. code:: pycon
-
-    >>> import bcrypt
-    >>> key = bcrypt.kdf(
-    ...     password=b'password',
-    ...     salt=b'salt',
-    ...     desired_key_bytes=32,
-    ...     rounds=100)
-
-
-Adjustable Work Factor
-~~~~~~~~~~~~~~~~~~~~~~
-One of bcrypt's features is an adjustable logarithmic work factor. To adjust
-the work factor merely pass the desired number of rounds to
-``bcrypt.gensalt(rounds=12)`` (which defaults to 12):
-
-.. code:: pycon
-
-    >>> import bcrypt
-    >>> password = b"super secret password"
-    >>> # Hash a password for the first time, with a certain number of rounds
-    >>> hashed = bcrypt.hashpw(password, bcrypt.gensalt(14))
-    >>> # Check that an unhashed password matches one that has previously been
-    >>> # hashed
-    >>> if bcrypt.checkpw(password, hashed):
-    ...     print("It Matches!")
-    ... else:
-    ...     print("It Does not Match :(")
-
-
-Adjustable Prefix
-~~~~~~~~~~~~~~~~~
-
-Another one of bcrypt's features is an adjustable prefix to let you define what
-libraries you'll remain compatible with. To adjust this, pass either ``2a`` or
-``2b`` (the default) to ``bcrypt.gensalt(prefix=b"2b")`` as a bytes object.
-
-As of 3.0.0 the ``$2y$`` prefix is still supported in ``hashpw`` but deprecated.
-
-Maximum Password Length
-~~~~~~~~~~~~~~~~~~~~~~~
-
-The bcrypt algorithm only handles passwords up to 72 characters; any characters
-beyond that are ignored. To work around this, a common approach is to hash a
-password with a cryptographic hash (such as ``sha256``) and then base64
-encode it to prevent NULL byte problems before hashing the result with
-``bcrypt``:
-
-.. code:: pycon
-
-    >>> import base64
-    >>> import hashlib
-    >>> import bcrypt
-    >>> password = b"an incredibly long password" * 10
-    >>> hashed = bcrypt.hashpw(
-    ...     base64.b64encode(hashlib.sha256(password).digest()),
-    ...     bcrypt.gensalt()
-    ... )
-
-Compatibility
--------------
-
-This library should be compatible with py-bcrypt and will run on Python
-3.6+ and PyPy 3.
-
-Security
---------
-
-``bcrypt`` follows the `same security policy as cryptography`_; if you
-identify a vulnerability, we ask you to contact us privately.
-
-.. _`same security policy as cryptography`: https://cryptography.io/en/latest/security.html
-.. _`standard library`: https://docs.python.org/3/library/hashlib.html#hashlib.scrypt
-.. _`argon2_cffi`: https://argon2-cffi.readthedocs.io
-.. 
_`cryptography`: https://cryptography.io/en/latest/hazmat/primitives/key-derivation-functions/#cryptography.hazmat.primitives.kdf.scrypt.Scrypt diff --git a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/RECORD b/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/RECORD deleted file mode 100644 index 8a8af79..0000000 --- a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/RECORD +++ /dev/null @@ -1,11 +0,0 @@ -bcrypt-4.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -bcrypt-4.2.0.dist-info/LICENSE,sha256=gXPVwptPlW1TJ4HSuG5OMPg-a3h43OGMkZRR1rpwfJA,10850 -bcrypt-4.2.0.dist-info/METADATA,sha256=y3n6wKyBQcHQVb6IYNSZtEOuWyNA5ks_KfERdu4Nf8A,9890 -bcrypt-4.2.0.dist-info/RECORD,, -bcrypt-4.2.0.dist-info/WHEEL,sha256=Ba1paPmOmGKaZKdZNuQuROs-GGTc11aSx1H0TqVoZaE,100 -bcrypt-4.2.0.dist-info/top_level.txt,sha256=BkR_qBzDbSuycMzHWE1vzXrfYecAzUVmQs6G2CukqNI,7 -bcrypt/__init__.py,sha256=zTtuqGGQxDgxcqm1f_0UbbPS6uCl-WxL98gSYDMSUbw,1000 -bcrypt/__init__.pyi,sha256=ITUCB9mPVU8sKUbJQMDUH5YfQXZb1O55F9qvKZR_o8I,333 -bcrypt/__pycache__/__init__.cpython-312.pyc,, -bcrypt/_bcrypt.pyd,sha256=PSbv7t1A6ctn1mgDsjX1bTilky0dgrhsrk7azlOF0no,304128 -bcrypt/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/WHEEL b/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/WHEEL deleted file mode 100644 index e166bdd..0000000 --- a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.43.0) -Root-Is-Purelib: false -Tag: cp39-abi3-win_amd64 - diff --git a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/top_level.txt b/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/top_level.txt deleted file mode 100644 index 7f0b6e7..0000000 --- a/myenv/Lib/site-packages/bcrypt-4.2.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -bcrypt diff --git a/myenv/Lib/site-packages/bcrypt/__init__.py b/myenv/Lib/site-packages/bcrypt/__init__.py deleted file mode 100644 index c201934..0000000 --- a/myenv/Lib/site-packages/bcrypt/__init__.py +++ /dev/null @@ -1,43 +0,0 @@ -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from ._bcrypt import ( - __author__, - __copyright__, - __email__, - __license__, - __summary__, - __title__, - __uri__, - checkpw, - gensalt, - hashpw, - kdf, -) -from ._bcrypt import ( - __version_ex__ as __version__, -) - -__all__ = [ - "gensalt", - "hashpw", - "checkpw", - "kdf", - "__title__", - "__summary__", - "__uri__", - "__version__", - "__author__", - "__email__", - "__license__", - "__copyright__", -] diff --git a/myenv/Lib/site-packages/bcrypt/__init__.pyi b/myenv/Lib/site-packages/bcrypt/__init__.pyi deleted file mode 100644 index 12e4a2e..0000000 --- a/myenv/Lib/site-packages/bcrypt/__init__.pyi +++ /dev/null @@ -1,10 +0,0 @@ -def gensalt(rounds: int = 12, prefix: bytes = b"2b") -> bytes: ... -def hashpw(password: bytes, salt: bytes) -> bytes: ... -def checkpw(password: bytes, hashed_password: bytes) -> bool: ... 
-def kdf( - password: bytes, - salt: bytes, - desired_key_bytes: int, - rounds: int, - ignore_few_rounds: bool = False, -) -> bytes: ... diff --git a/myenv/Lib/site-packages/bcrypt/_bcrypt.pyd b/myenv/Lib/site-packages/bcrypt/_bcrypt.pyd deleted file mode 100644 index 4d0bbce..0000000 Binary files a/myenv/Lib/site-packages/bcrypt/_bcrypt.pyd and /dev/null differ diff --git a/myenv/Lib/site-packages/bcrypt/py.typed b/myenv/Lib/site-packages/bcrypt/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/INSTALLER b/myenv/Lib/site-packages/blinker-1.8.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/LICENSE.txt b/myenv/Lib/site-packages/blinker-1.8.2.dist-info/LICENSE.txt deleted file mode 100644 index 79c9825..0000000 --- a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -Copyright 2010 Jason Kirtland - -Permission is hereby granted, free of charge, to any person obtaining a -copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/METADATA b/myenv/Lib/site-packages/blinker-1.8.2.dist-info/METADATA deleted file mode 100644 index efa45f5..0000000 --- a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: blinker -Version: 1.8.2 -Summary: Fast, simple object-to-object and broadcast signaling -Author: Jason Kirtland -Maintainer-email: Pallets Ecosystem -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://blinker.readthedocs.io -Project-URL: Source, https://github.com/pallets-eco/blinker/ - -# Blinker - -Blinker provides a fast dispatching system that allows any number of -interested parties to subscribe to events, or "signals". - - -## Pallets Community Ecosystem - -> [!IMPORTANT]\ -> This project is part of the Pallets Community Ecosystem. Pallets is the open -> source organization that maintains Flask; Pallets-Eco enables community -> maintenance of related projects. If you are interested in helping maintain -> this project, please reach out on [the Pallets Discord server][discord]. 
->
-> [discord]: https://discord.gg/pallets
-
-
-## Example
-
-Signal receivers can subscribe to specific senders or receive signals
-sent by any sender.
-
-```pycon
->>> from blinker import signal
->>> started = signal('round-started')
->>> def each(round):
-...     print(f"Round {round}!")
-...
->>> started.connect(each)
-
->>> def round_two(round):
-...     print("This is round two.")
-...
->>> started.connect(round_two, sender=2)
-
->>> for round in range(1, 4):
-...     started.send(round)
-...
-Round 1!
-Round 2!
-This is round two.
-Round 3!
-```
-
diff --git a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/RECORD b/myenv/Lib/site-packages/blinker-1.8.2.dist-info/RECORD
deleted file mode 100644
index b6a6b7e..0000000
--- a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/RECORD
+++ /dev/null
@@ -1,12 +0,0 @@
-blinker-1.8.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-blinker-1.8.2.dist-info/LICENSE.txt,sha256=nrc6HzhZekqhcCXSrhvjg5Ykx5XphdTw6Xac4p-spGc,1054
-blinker-1.8.2.dist-info/METADATA,sha256=3tEx40hm9IEofyFqDPJsDPE9MAIEhtifapoSp7FqzuA,1633
-blinker-1.8.2.dist-info/RECORD,,
-blinker-1.8.2.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81
-blinker/__init__.py,sha256=ymyJY_PoTgBzaPgdr4dq-RRsGh7D-sYQIGMNp8Rx4qc,1577
-blinker/__pycache__/__init__.cpython-312.pyc,,
-blinker/__pycache__/_utilities.cpython-312.pyc,,
-blinker/__pycache__/base.cpython-312.pyc,,
-blinker/_utilities.py,sha256=0J7eeXXTUx0Ivf8asfpx0ycVkp0Eqfqnj117x2mYX9E,1675
-blinker/base.py,sha256=nIZJEtXQ8LLZZJrwVp2wQcdfCzDixvAHR9VpSWiyVcQ,22574
-blinker/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
diff --git a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/WHEEL b/myenv/Lib/site-packages/blinker-1.8.2.dist-info/WHEEL
deleted file mode 100644
index 3b5e64b..0000000
--- a/myenv/Lib/site-packages/blinker-1.8.2.dist-info/WHEEL
+++ /dev/null
@@ -1,4 +0,0 @@
-Wheel-Version: 1.0
-Generator: flit 3.9.0
-Root-Is-Purelib: true
-Tag: py3-none-any
diff --git a/myenv/Lib/site-packages/blinker/__init__.py b/myenv/Lib/site-packages/blinker/__init__.py
deleted file mode 100644
index c93527e..0000000
--- a/myenv/Lib/site-packages/blinker/__init__.py
+++ /dev/null
@@ -1,60 +0,0 @@
-from __future__ import annotations
-
-import typing as t
-
-from .base import ANY
-from .base import default_namespace
-from .base import NamedSignal
-from .base import Namespace
-from .base import Signal
-from .base import signal
-
-__all__ = [
-    "ANY",
-    "default_namespace",
-    "NamedSignal",
-    "Namespace",
-    "Signal",
-    "signal",
-]
-
-
-def __getattr__(name: str) -> t.Any:
-    import warnings
-
-    if name == "__version__":
-        import importlib.metadata
-
-        warnings.warn(
-            "The '__version__' attribute is deprecated and will be removed in"
-            " Blinker 1.9.0. Use feature detection or"
-            " 'importlib.metadata.version(\"blinker\")' instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return importlib.metadata.version("blinker")
-
-    if name == "receiver_connected":
-        from .base import _receiver_connected
-
-        warnings.warn(
-            "The global 'receiver_connected' signal is deprecated and will be"
-            " removed in Blinker 1.9. Use 'Signal.receiver_connected' and"
-            " 'Signal.receiver_disconnected' instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return _receiver_connected
-
-    if name == "WeakNamespace":
-        from .base import _WeakNamespace
-
-        warnings.warn(
-            "'WeakNamespace' is deprecated and will be removed in Blinker 1.9."
- " Use 'Namespace' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _WeakNamespace - - raise AttributeError(name) diff --git a/myenv/Lib/site-packages/blinker/_utilities.py b/myenv/Lib/site-packages/blinker/_utilities.py deleted file mode 100644 index 000c902..0000000 --- a/myenv/Lib/site-packages/blinker/_utilities.py +++ /dev/null @@ -1,64 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import inspect -import typing as t -from weakref import ref -from weakref import WeakMethod - -T = t.TypeVar("T") - - -class Symbol: - """A constant symbol, nicer than ``object()``. Repeated calls return the - same instance. - - >>> Symbol('foo') is Symbol('foo') - True - >>> Symbol('foo') - foo - """ - - symbols: t.ClassVar[dict[str, Symbol]] = {} - - def __new__(cls, name: str) -> Symbol: - if name in cls.symbols: - return cls.symbols[name] - - obj = super().__new__(cls) - cls.symbols[name] = obj - return obj - - def __init__(self, name: str) -> None: - self.name = name - - def __repr__(self) -> str: - return self.name - - def __getnewargs__(self) -> tuple[t.Any, ...]: - return (self.name,) - - -def make_id(obj: object) -> c.Hashable: - """Get a stable identifier for a receiver or sender, to be used as a dict - key or in a set. - """ - if inspect.ismethod(obj): - # The id of a bound method is not stable, but the id of the unbound - # function and instance are. - return id(obj.__func__), id(obj.__self__) - - if isinstance(obj, (str, int)): - # Instances with the same value always compare equal and have the same - # hash, even if the id may change. - return obj - - # Assume other types are not hashable but will always be the same instance. - return id(obj) - - -def make_ref(obj: T, callback: c.Callable[[ref[T]], None] | None = None) -> ref[T]: - if inspect.ismethod(obj): - return WeakMethod(obj, callback) # type: ignore[arg-type, return-value] - - return ref(obj, callback) diff --git a/myenv/Lib/site-packages/blinker/base.py b/myenv/Lib/site-packages/blinker/base.py deleted file mode 100644 index ec494b1..0000000 --- a/myenv/Lib/site-packages/blinker/base.py +++ /dev/null @@ -1,621 +0,0 @@ -from __future__ import annotations - -import collections.abc as c -import typing as t -import warnings -import weakref -from collections import defaultdict -from contextlib import AbstractContextManager -from contextlib import contextmanager -from functools import cached_property -from inspect import iscoroutinefunction -from weakref import WeakValueDictionary - -from ._utilities import make_id -from ._utilities import make_ref -from ._utilities import Symbol - -if t.TYPE_CHECKING: - F = t.TypeVar("F", bound=c.Callable[..., t.Any]) - -ANY = Symbol("ANY") -"""Symbol for "any sender".""" - -ANY_ID = 0 - - -class Signal: - """A notification emitter. - - :param doc: The docstring for the signal. - """ - - ANY = ANY - """An alias for the :data:`~blinker.ANY` sender symbol.""" - - set_class: type[set[t.Any]] = set - """The set class to use for tracking connected receivers and senders. - Python's ``set`` is unordered. If receivers must be dispatched in the order - they were connected, an ordered set implementation can be used. - - .. versionadded:: 1.7 - """ - - @cached_property - def receiver_connected(self) -> Signal: - """Emitted at the end of each :meth:`connect` call. - - The signal sender is the signal instance, and the :meth:`connect` - arguments are passed through: ``receiver``, ``sender``, and ``weak``. - - .. 
versionadded:: 1.2
-        """
-        return Signal(doc="Emitted after a receiver connects.")
-
-    @cached_property
-    def receiver_disconnected(self) -> Signal:
-        """Emitted at the end of each :meth:`disconnect` call.
-
-        The sender is the signal instance, and the :meth:`disconnect` arguments
-        are passed through: ``receiver`` and ``sender``.
-
-        This signal is emitted **only** when :meth:`disconnect` is called
-        explicitly. This signal cannot be emitted by an automatic disconnect
-        when a weakly referenced receiver or sender goes out of scope, as the
-        instance is no longer available to be used as the sender for this
-        signal.
-
-        An alternative approach is available by subscribing to
-        :attr:`receiver_connected` and setting up a custom weakref cleanup
-        callback on weak receivers and senders.
-
-        .. versionadded:: 1.2
-        """
-        return Signal(doc="Emitted after a receiver disconnects.")
-
-    def __init__(self, doc: str | None = None) -> None:
-        if doc:
-            self.__doc__ = doc
-
-        self.receivers: dict[
-            t.Any, weakref.ref[c.Callable[..., t.Any]] | c.Callable[..., t.Any]
-        ] = {}
-        """The map of connected receivers. Useful to quickly check if any
-        receivers are connected to the signal: ``if s.receivers:``. The
-        structure and data is not part of the public API, but checking its
-        boolean value is.
-        """
-
-        self.is_muted: bool = False
-        self._by_receiver: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
-        self._by_sender: dict[t.Any, set[t.Any]] = defaultdict(self.set_class)
-        self._weak_senders: dict[t.Any, weakref.ref[t.Any]] = {}
-
-    def connect(self, receiver: F, sender: t.Any = ANY, weak: bool = True) -> F:
-        """Connect ``receiver`` to be called when the signal is sent by
-        ``sender``.
-
-        :param receiver: The callable to call when :meth:`send` is called with
-            the given ``sender``, passing ``sender`` as a positional argument
-            along with any extra keyword arguments.
-        :param sender: Any object or :data:`ANY`. ``receiver`` will only be
-            called when :meth:`send` is called with this sender. If ``ANY``, the
-            receiver will be called for any sender. A receiver may be connected
-            to multiple senders by calling :meth:`connect` multiple times.
-        :param weak: Track the receiver with a :mod:`weakref`. The receiver will
-            be automatically disconnected when it is garbage collected. When
-            connecting a receiver defined within a function, set to ``False``,
-            otherwise it will be disconnected when the function scope ends.
-        """
-        receiver_id = make_id(receiver)
-        sender_id = ANY_ID if sender is ANY else make_id(sender)
-
-        if weak:
-            self.receivers[receiver_id] = make_ref(
-                receiver, self._make_cleanup_receiver(receiver_id)
-            )
-        else:
-            self.receivers[receiver_id] = receiver
-
-        self._by_sender[sender_id].add(receiver_id)
-        self._by_receiver[receiver_id].add(sender_id)
-
-        if sender is not ANY and sender_id not in self._weak_senders:
-            # store a cleanup for weakref-able senders
-            try:
-                self._weak_senders[sender_id] = make_ref(
-                    sender, self._make_cleanup_sender(sender_id)
-                )
-            except TypeError:
-                pass
-
-        if "receiver_connected" in self.__dict__ and self.receiver_connected.receivers:
-            try:
-                self.receiver_connected.send(
-                    self, receiver=receiver, sender=sender, weak=weak
-                )
-            except TypeError:
-                # TODO no explanation or test for this
-                self.disconnect(receiver, sender)
-                raise
-
-        if _receiver_connected.receivers and self is not _receiver_connected:
-            try:
-                _receiver_connected.send(
-                    self, receiver_arg=receiver, sender_arg=sender, weak_arg=weak
-                )
-            except TypeError:
-                self.disconnect(receiver, sender)
-                raise
-
-        return receiver
-
-    def connect_via(self, sender: t.Any, weak: bool = False) -> c.Callable[[F], F]:
-        """Connect the decorated function to be called when the signal is sent
-        by ``sender``.
-
-        The decorated function will be called when :meth:`send` is called with
-        the given ``sender``, passing ``sender`` as a positional argument along
-        with any extra keyword arguments.
-
-        :param sender: Any object or :data:`ANY`. ``receiver`` will only be
-            called when :meth:`send` is called with this sender. If ``ANY``, the
-            receiver will be called for any sender. A receiver may be connected
-            to multiple senders by calling :meth:`connect` multiple times.
-        :param weak: Track the receiver with a :mod:`weakref`. The receiver will
-            be automatically disconnected when it is garbage collected. When
-            connecting a receiver defined within a function, set to ``False``,
-            otherwise it will be disconnected when the function scope ends.
-
-        .. versionadded:: 1.1
-        """
-
-        def decorator(fn: F) -> F:
-            self.connect(fn, sender, weak)
-            return fn
-
-        return decorator
-
-    @contextmanager
-    def connected_to(
-        self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
-    ) -> c.Generator[None, None, None]:
-        """A context manager that temporarily connects ``receiver`` to the
-        signal while a ``with`` block executes. When the block exits, the
-        receiver is disconnected. Useful for tests.
-
-        :param receiver: The callable to call when :meth:`send` is called with
-            the given ``sender``, passing ``sender`` as a positional argument
-            along with any extra keyword arguments.
-        :param sender: Any object or :data:`ANY`. ``receiver`` will only be
-            called when :meth:`send` is called with this sender. If ``ANY``, the
-            receiver will be called for any sender.
-
-        .. versionadded:: 1.1
-        """
-        self.connect(receiver, sender=sender, weak=False)
-
-        try:
-            yield None
-        finally:
-            self.disconnect(receiver)
-
-    @contextmanager
-    def muted(self) -> c.Generator[None, None, None]:
-        """A context manager that temporarily disables the signal. No receivers
-        will be called if the signal is sent, until the ``with`` block exits.
-        Useful for tests.
-        """
-        self.is_muted = True
-
-        try:
-            yield None
-        finally:
-            self.is_muted = False
-
-    def temporarily_connected_to(
-        self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY
-    ) -> AbstractContextManager[None]:
-        """Deprecated alias for :meth:`connected_to`.
-
-        .. 
deprecated:: 1.1 - Renamed to ``connected_to``. Will be removed in Blinker 1.9. - - .. versionadded:: 0.9 - """ - warnings.warn( - "'temporarily_connected_to' is renamed to 'connected_to'. The old name is" - " deprecated and will be removed in Blinker 1.9.", - DeprecationWarning, - stacklevel=2, - ) - return self.connected_to(receiver, sender) - - def send( - self, - sender: t.Any | None = None, - /, - *, - _async_wrapper: c.Callable[ - [c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]]], c.Callable[..., t.Any] - ] - | None = None, - **kwargs: t.Any, - ) -> list[tuple[c.Callable[..., t.Any], t.Any]]: - """Call all receivers that are connected to the given ``sender`` - or :data:`ANY`. Each receiver is called with ``sender`` as a positional - argument along with any extra keyword arguments. Return a list of - ``(receiver, return value)`` tuples. - - The order receivers are called is undefined, but can be influenced by - setting :attr:`set_class`. - - If a receiver raises an exception, that exception will propagate up. - This makes debugging straightforward, with an assumption that correctly - implemented receivers will not raise. - - :param sender: Call receivers connected to this sender, in addition to - those connected to :data:`ANY`. - :param _async_wrapper: Will be called on any receivers that are async - coroutines to turn them into sync callables. For example, could run - the receiver with an event loop. - :param kwargs: Extra keyword arguments to pass to each receiver. - - .. versionchanged:: 1.7 - Added the ``_async_wrapper`` argument. - """ - if self.is_muted: - return [] - - results = [] - - for receiver in self.receivers_for(sender): - if iscoroutinefunction(receiver): - if _async_wrapper is None: - raise RuntimeError("Cannot send to a coroutine function.") - - result = _async_wrapper(receiver)(sender, **kwargs) - else: - result = receiver(sender, **kwargs) - - results.append((receiver, result)) - - return results - - async def send_async( - self, - sender: t.Any | None = None, - /, - *, - _sync_wrapper: c.Callable[ - [c.Callable[..., t.Any]], c.Callable[..., c.Coroutine[t.Any, t.Any, t.Any]] - ] - | None = None, - **kwargs: t.Any, - ) -> list[tuple[c.Callable[..., t.Any], t.Any]]: - """Await all receivers that are connected to the given ``sender`` - or :data:`ANY`. Each receiver is called with ``sender`` as a positional - argument along with any extra keyword arguments. Return a list of - ``(receiver, return value)`` tuples. - - The order receivers are called is undefined, but can be influenced by - setting :attr:`set_class`. - - If a receiver raises an exception, that exception will propagate up. - This makes debugging straightforward, with an assumption that correctly - implemented receivers will not raise. - - :param sender: Call receivers connected to this sender, in addition to - those connected to :data:`ANY`. - :param _sync_wrapper: Will be called on any receivers that are sync - callables to turn them into async coroutines. For example, - could call the receiver in a thread. - :param kwargs: Extra keyword arguments to pass to each receiver. - - .. 
versionadded:: 1.7 - """ - if self.is_muted: - return [] - - results = [] - - for receiver in self.receivers_for(sender): - if not iscoroutinefunction(receiver): - if _sync_wrapper is None: - raise RuntimeError("Cannot send to a non-coroutine function.") - - result = await _sync_wrapper(receiver)(sender, **kwargs) - else: - result = await receiver(sender, **kwargs) - - results.append((receiver, result)) - - return results - - def has_receivers_for(self, sender: t.Any) -> bool: - """Check if there is at least one receiver that will be called with the - given ``sender``. A receiver connected to :data:`ANY` will always be - called, regardless of sender. Does not check if weakly referenced - receivers are still live. See :meth:`receivers_for` for a stronger - search. - - :param sender: Check for receivers connected to this sender, in addition - to those connected to :data:`ANY`. - """ - if not self.receivers: - return False - - if self._by_sender[ANY_ID]: - return True - - if sender is ANY: - return False - - return make_id(sender) in self._by_sender - - def receivers_for( - self, sender: t.Any - ) -> c.Generator[c.Callable[..., t.Any], None, None]: - """Yield each receiver to be called for ``sender``, in addition to those - to be called for :data:`ANY`. Weakly referenced receivers that are not - live will be disconnected and skipped. - - :param sender: Yield receivers connected to this sender, in addition - to those connected to :data:`ANY`. - """ - # TODO: test receivers_for(ANY) - if not self.receivers: - return - - sender_id = make_id(sender) - - if sender_id in self._by_sender: - ids = self._by_sender[ANY_ID] | self._by_sender[sender_id] - else: - ids = self._by_sender[ANY_ID].copy() - - for receiver_id in ids: - receiver = self.receivers.get(receiver_id) - - if receiver is None: - continue - - if isinstance(receiver, weakref.ref): - strong = receiver() - - if strong is None: - self._disconnect(receiver_id, ANY_ID) - continue - - yield strong - else: - yield receiver - - def disconnect(self, receiver: c.Callable[..., t.Any], sender: t.Any = ANY) -> None: - """Disconnect ``receiver`` from being called when the signal is sent by - ``sender``. - - :param receiver: A connected receiver callable. - :param sender: Disconnect from only this sender. By default, disconnect - from all senders. - """ - sender_id: c.Hashable - - if sender is ANY: - sender_id = ANY_ID - else: - sender_id = make_id(sender) - - receiver_id = make_id(receiver) - self._disconnect(receiver_id, sender_id) - - if ( - "receiver_disconnected" in self.__dict__ - and self.receiver_disconnected.receivers - ): - self.receiver_disconnected.send(self, receiver=receiver, sender=sender) - - def _disconnect(self, receiver_id: c.Hashable, sender_id: c.Hashable) -> None: - if sender_id == ANY_ID: - if self._by_receiver.pop(receiver_id, None) is not None: - for bucket in self._by_sender.values(): - bucket.discard(receiver_id) - - self.receivers.pop(receiver_id, None) - else: - self._by_sender[sender_id].discard(receiver_id) - self._by_receiver[receiver_id].discard(sender_id) - - def _make_cleanup_receiver( - self, receiver_id: c.Hashable - ) -> c.Callable[[weakref.ref[c.Callable[..., t.Any]]], None]: - """Create a callback function to disconnect a weakly referenced - receiver when it is garbage collected. 
- """ - - def cleanup(ref: weakref.ref[c.Callable[..., t.Any]]) -> None: - self._disconnect(receiver_id, ANY_ID) - - return cleanup - - def _make_cleanup_sender( - self, sender_id: c.Hashable - ) -> c.Callable[[weakref.ref[t.Any]], None]: - """Create a callback function to disconnect all receivers for a weakly - referenced sender when it is garbage collected. - """ - assert sender_id != ANY_ID - - def cleanup(ref: weakref.ref[t.Any]) -> None: - self._weak_senders.pop(sender_id, None) - - for receiver_id in self._by_sender.pop(sender_id, ()): - self._by_receiver[receiver_id].discard(sender_id) - - return cleanup - - def _cleanup_bookkeeping(self) -> None: - """Prune unused sender/receiver bookkeeping. Not threadsafe. - - Connecting & disconnecting leaves behind a small amount of bookkeeping - data. Typical workloads using Blinker, for example in most web apps, - Flask, CLI scripts, etc., are not adversely affected by this - bookkeeping. - - With a long-running process performing dynamic signal routing with high - volume, e.g. connecting to function closures, senders are all unique - object instances. Doing all of this over and over may cause memory usage - to grow due to extraneous bookkeeping. (An empty ``set`` for each stale - sender/receiver pair.) - - This method will prune that bookkeeping away, with the caveat that such - pruning is not threadsafe. The risk is that cleanup of a fully - disconnected receiver/sender pair occurs while another thread is - connecting that same pair. If you are in the highly dynamic, unique - receiver/sender situation that has lead you to this method, that failure - mode is perhaps not a big deal for you. - """ - for mapping in (self._by_sender, self._by_receiver): - for ident, bucket in list(mapping.items()): - if not bucket: - mapping.pop(ident, None) - - def _clear_state(self) -> None: - """Disconnect all receivers and senders. Useful for tests.""" - self._weak_senders.clear() - self.receivers.clear() - self._by_sender.clear() - self._by_receiver.clear() - - -_receiver_connected = Signal( - """\ -Sent by a :class:`Signal` after a receiver connects. - -:argument: the Signal that was connected to -:keyword receiver_arg: the connected receiver -:keyword sender_arg: the sender to connect to -:keyword weak_arg: true if the connection to receiver_arg is a weak reference - -.. deprecated:: 1.2 - Individual signals have their own :attr:`~Signal.receiver_connected` and - :attr:`~Signal.receiver_disconnected` signals with a slightly simplified - call signature. This global signal will be removed in Blinker 1.9. -""" -) - - -class NamedSignal(Signal): - """A named generic notification emitter. The name is not used by the signal - itself, but matches the key in the :class:`Namespace` that it belongs to. - - :param name: The name of the signal within the namespace. - :param doc: The docstring for the signal. - """ - - def __init__(self, name: str, doc: str | None = None) -> None: - super().__init__(doc) - - #: The name of this signal. - self.name: str = name - - def __repr__(self) -> str: - base = super().__repr__() - return f"{base[:-1]}; {self.name!r}>" # noqa: E702 - - -if t.TYPE_CHECKING: - - class PNamespaceSignal(t.Protocol): - def __call__(self, name: str, doc: str | None = None) -> NamedSignal: ... 
- - # Python < 3.9 - _NamespaceBase = dict[str, NamedSignal] # type: ignore[misc] -else: - _NamespaceBase = dict - - -class Namespace(_NamespaceBase): - """A dict mapping names to signals.""" - - def signal(self, name: str, doc: str | None = None) -> NamedSignal: - """Return the :class:`NamedSignal` for the given ``name``, creating it - if required. Repeated calls with the same name return the same signal. - - :param name: The name of the signal. - :param doc: The docstring of the signal. - """ - if name not in self: - self[name] = NamedSignal(name, doc) - - return self[name] - - -class _WeakNamespace(WeakValueDictionary): # type: ignore[type-arg] - """A weak mapping of names to signals. - - Automatically cleans up unused signals when the last reference goes out - of scope. This namespace implementation provides similar behavior to Blinker - <= 1.2. - - .. deprecated:: 1.3 - Will be removed in Blinker 1.9. - - .. versionadded:: 1.3 - """ - - def __init__(self) -> None: - warnings.warn( - "'WeakNamespace' is deprecated and will be removed in Blinker 1.9." - " Use 'Namespace' instead.", - DeprecationWarning, - stacklevel=2, - ) - super().__init__() - - def signal(self, name: str, doc: str | None = None) -> NamedSignal: - """Return the :class:`NamedSignal` for the given ``name``, creating it - if required. Repeated calls with the same name return the same signal. - - :param name: The name of the signal. - :param doc: The docstring of the signal. - """ - if name not in self: - self[name] = NamedSignal(name, doc) - - return self[name] # type: ignore[no-any-return] - - -default_namespace: Namespace = Namespace() -"""A default :class:`Namespace` for creating named signals. :func:`signal` -creates a :class:`NamedSignal` in this namespace. -""" - -signal: PNamespaceSignal = default_namespace.signal -"""Return a :class:`NamedSignal` in :data:`default_namespace` with the given -``name``, creating it if required. Repeated calls with the same name return the -same signal. -""" - - -def __getattr__(name: str) -> t.Any: - if name == "receiver_connected": - warnings.warn( - "The global 'receiver_connected' signal is deprecated and will be" - " removed in Blinker 1.9. Use 'Signal.receiver_connected' and" - " 'Signal.receiver_disconnected' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _receiver_connected - - if name == "WeakNamespace": - warnings.warn( - "'WeakNamespace' is deprecated and will be removed in Blinker 1.9." - " Use 'Namespace' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _WeakNamespace - - raise AttributeError(name) diff --git a/myenv/Lib/site-packages/blinker/py.typed b/myenv/Lib/site-packages/blinker/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/INSTALLER b/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/LICENSE b/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/LICENSE deleted file mode 100644 index 62b076c..0000000 --- a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -This package contains a modified version of ca-bundle.crt: - -ca-bundle.crt -- Bundle of CA Root Certificates - -This is a bundle of X.509 certificates of public Certificate Authorities -(CA). 
These were automatically extracted from Mozilla's root certificates -file (certdata.txt). This file can be found in the mozilla source tree: -https://hg.mozilla.org/mozilla-central/file/tip/security/nss/lib/ckfw/builtins/certdata.txt -It contains the certificates in PEM format and therefore -can be directly used with curl / libcurl / php_curl, or with -an Apache+mod_ssl webserver for SSL client authentication. -Just configure this file as the SSLCACertificateFile. - -***** BEGIN LICENSE BLOCK ***** -This Source Code Form is subject to the terms of the Mozilla Public License, -v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain -one at http://mozilla.org/MPL/2.0/. - -***** END LICENSE BLOCK ***** -@(#) $RCSfile: certdata.txt,v $ $Revision: 1.80 $ $Date: 2011/11/03 15:11:58 $ diff --git a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/METADATA b/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/METADATA deleted file mode 100644 index 0a3a772..0000000 --- a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/METADATA +++ /dev/null @@ -1,67 +0,0 @@ -Metadata-Version: 2.1 -Name: certifi -Version: 2024.8.30 -Summary: Python package for providing Mozilla's CA Bundle. -Home-page: https://github.com/certifi/python-certifi -Author: Kenneth Reitz -Author-email: me@kennethreitz.com -License: MPL-2.0 -Project-URL: Source, https://github.com/certifi/python-certifi -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0) -Classifier: Natural Language :: English -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Requires-Python: >=3.6 -License-File: LICENSE - -Certifi: Python SSL Certificates -================================ - -Certifi provides Mozilla's carefully curated collection of Root Certificates for -validating the trustworthiness of SSL certificates while verifying the identity -of TLS hosts. It has been extracted from the `Requests`_ project. - -Installation ------------- - -``certifi`` is available on PyPI. Simply install it with ``pip``:: - -    $ pip install certifi - -Usage ------ - -To reference the installed certificate authority (CA) bundle, you can use the -built-in function:: - -    >>> import certifi - -    >>> certifi.where() -    '/usr/local/lib/python3.7/site-packages/certifi/cacert.pem' - -Or from the command line:: - -    $ python -m certifi -    /usr/local/lib/python3.7/site-packages/certifi/cacert.pem - -Enjoy! - -.. _`Requests`: https://requests.readthedocs.io/en/master/ - -Addition/Removal of Certificates --------------------------------- - -Certifi does not support any addition/removal or other modification of the -CA trust store content. This project is intended to provide a reliable and -highly portable root of trust to python deployments. Look to upstream projects -for methods to use alternate trust.
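
The METADATA above shows ``certifi.where()``; the usual way to put that path to work is to hand it to an ``ssl`` context. A common standard-library pattern, not something certifi itself ships (the URL is a placeholder):

    import ssl
    import urllib.request

    import certifi

    # Trust exactly the Mozilla bundle certifi ships, not whatever
    # store the operating system happens to provide.
    context = ssl.create_default_context(cafile=certifi.where())

    with urllib.request.urlopen("https://example.com/", context=context) as resp:
        print(resp.status)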
diff --git a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/RECORD b/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/RECORD deleted file mode 100644 index 88c23e6..0000000 --- a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/RECORD +++ /dev/null @@ -1,14 +0,0 @@ -certifi-2024.8.30.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -certifi-2024.8.30.dist-info/LICENSE,sha256=6TcW2mucDVpKHfYP5pWzcPBpVgPSH2-D8FPkLPwQyvc,989 -certifi-2024.8.30.dist-info/METADATA,sha256=GhBHRVUN6a4ZdUgE_N5wmukJfyuoE-QyIl8Y3ifNQBM,2222 -certifi-2024.8.30.dist-info/RECORD,, -certifi-2024.8.30.dist-info/WHEEL,sha256=UvcQYKBHoFqaQd6LKyqHw9fxEolWLQnlzP0h_LgJAfI,91 -certifi-2024.8.30.dist-info/top_level.txt,sha256=KMu4vUCfsjLrkPbSNdgdekS-pVJzBAJFO__nI8NF6-U,8 -certifi/__init__.py,sha256=p_GYZrjUwPBUhpLlCZoGb0miKBKSqDAyZC5DvIuqbHQ,94 -certifi/__main__.py,sha256=xBBoj905TUWBLRGANOcf7oi6e-3dMP4cEoG9OyMs11g,243 -certifi/__pycache__/__init__.cpython-312.pyc,, -certifi/__pycache__/__main__.cpython-312.pyc,, -certifi/__pycache__/core.cpython-312.pyc,, -certifi/cacert.pem,sha256=lO3rZukXdPyuk6BWUJFOKQliWaXH6HGh9l1GGrUgG0c,299427 -certifi/core.py,sha256=qRDDFyXVJwTB_EmoGppaXU_R9qCZvhl-EzxPMuV3nTA,4426 -certifi/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/WHEEL b/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/WHEEL deleted file mode 100644 index 57e56b7..0000000 --- a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (74.0.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/top_level.txt b/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/top_level.txt deleted file mode 100644 index 963eac5..0000000 --- a/myenv/Lib/site-packages/certifi-2024.8.30.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -certifi diff --git a/myenv/Lib/site-packages/certifi/__init__.py b/myenv/Lib/site-packages/certifi/__init__.py deleted file mode 100644 index f61d77f..0000000 --- a/myenv/Lib/site-packages/certifi/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .core import contents, where - -__all__ = ["contents", "where"] -__version__ = "2024.08.30" diff --git a/myenv/Lib/site-packages/certifi/__main__.py b/myenv/Lib/site-packages/certifi/__main__.py deleted file mode 100644 index 8945b5d..0000000 --- a/myenv/Lib/site-packages/certifi/__main__.py +++ /dev/null @@ -1,12 +0,0 @@ -import argparse - -from certifi import contents, where - -parser = argparse.ArgumentParser() -parser.add_argument("-c", "--contents", action="store_true") -args = parser.parse_args() - -if args.contents: - print(contents()) -else: - print(where()) diff --git a/myenv/Lib/site-packages/certifi/cacert.pem b/myenv/Lib/site-packages/certifi/cacert.pem deleted file mode 100644 index 3c165a1..0000000 --- a/myenv/Lib/site-packages/certifi/cacert.pem +++ /dev/null @@ -1,4929 +0,0 @@ - -# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA -# Label: "GlobalSign Root CA" -# Serial: 4835703278459707669005204 -# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a -# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c -# SHA256 Fingerprint: eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 ------BEGIN CERTIFICATE----- 
-MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG -A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv -b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw -MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i -YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT -aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ -jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp -xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp -1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG -snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ -U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 -9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B -AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz -yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE -38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP -AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad -DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME -HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== ------END CERTIFICATE----- - -# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited -# Label: "Entrust.net Premium 2048 Secure Server CA" -# Serial: 946069240 -# MD5 Fingerprint: ee:29:31:bc:32:7e:9a:e6:e8:b5:f7:51:b4:34:71:90 -# SHA1 Fingerprint: 50:30:06:09:1d:97:d4:f5:ae:39:f7:cb:e7:92:7d:7d:65:2d:34:31 -# SHA256 Fingerprint: 6d:c4:71:72:e0:1c:bc:b0:bf:62:58:0d:89:5f:e2:b8:ac:9a:d4:f8:73:80:1e:0c:10:b9:c8:37:d2:1e:b1:77 ------BEGIN CERTIFICATE----- -MIIEKjCCAxKgAwIBAgIEOGPe+DANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML -RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp -bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 -IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0yOTA3 -MjQxNDE1MTJaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 -LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp -YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG -A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp -MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq -K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe -sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX -MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT -XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ -HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH -4QIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUVeSB0RGAvtiJuQijMfmhJAkWuXAwDQYJKoZIhvcNAQEFBQADggEBADub -j1abMOdTmXx6eadNl9cZlZD7Bh/KM3xGY4+WZiT6QBshJ8rmcnPyT/4xmf3IDExo -U8aAghOY+rat2l098c5u9hURlIIM7j+VrxGrD9cv3h8Dj1csHsm7mhpElesYT6Yf -zX1XEC+bBAlahLVu2B064dae0Wx5XnkcFMXj0EyTO2U87d89vqbllRrDtRnDvV5b -u/8j72gZyxKTJ1wDLW8w0B62GqzeWvfRqqgnpv55gcR5mTNXuhKwqeBCbJPKVt7+ -bYQLCIt+jerXmCHG8+c8eS9enNFMFY3h7CI3zJpDC5fcgJCNs2ebb0gIFVbPv/Er -fF6adulZkMV8gzURZVE= ------END CERTIFICATE----- - -# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust -# Subject: CN=Baltimore CyberTrust 
Root O=Baltimore OU=CyberTrust -# Label: "Baltimore CyberTrust Root" -# Serial: 33554617 -# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 -# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 -# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ -RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD -VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX -DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y -ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy -VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr -mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr -IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK -mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu -XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy -dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye -jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 -BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 -DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 -9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx -jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 -Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz -ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS -R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. -# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
-# Label: "Entrust Root Certification Authority" -# Serial: 1164660820 -# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 -# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 -# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c ------BEGIN CERTIFICATE----- -MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC -VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 -Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW -KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl -cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw -NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw -NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy -ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV -BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ -KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo -Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 -4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 -KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI -rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi -94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB -sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi -gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo -kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE -vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA -A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t -O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua -AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP -9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ -eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m -0vdXcDazv/wor3ElhVsT/h5/WrQ8 ------END CERTIFICATE----- - -# Issuer: CN=AAA Certificate Services O=Comodo CA Limited -# Subject: CN=AAA Certificate Services O=Comodo CA Limited -# Label: "Comodo AAA Services root" -# Serial: 1 -# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 -# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 -# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 ------BEGIN CERTIFICATE----- -MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb -MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow -GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj -YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM -GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP -ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua -BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe -3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 -YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR -rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm -ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU -oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF -MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v -QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t 
-b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF -AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q -GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz -Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 -G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi -l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 -smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 2 O=QuoVadis Limited -# Label: "QuoVadis Root CA 2" -# Serial: 1289 -# MD5 Fingerprint: 5e:39:7b:dd:f8:ba:ec:82:e9:ac:62:ba:0c:54:00:2b -# SHA1 Fingerprint: ca:3a:fb:cf:12:40:36:4b:44:b2:16:20:88:80:48:39:19:93:7c:f7 -# SHA256 Fingerprint: 85:a0:dd:7d:d7:20:ad:b7:ff:05:f8:3d:54:2b:20:9d:c7:ff:45:28:f7:d6:77:b1:83:89:fe:a5:e5:c4:9e:86 ------BEGIN CERTIFICATE----- -MIIFtzCCA5+gAwIBAgICBQkwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv -b3QgQ0EgMjAeFw0wNjExMjQxODI3MDBaFw0zMTExMjQxODIzMzNaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCa -GMpLlA0ALa8DKYrwD4HIrkwZhR0In6spRIXzL4GtMh6QRr+jhiYaHv5+HBg6XJxg -Fyo6dIMzMH1hVBHL7avg5tKifvVrbxi3Cgst/ek+7wrGsxDp3MJGF/hd/aTa/55J -WpzmM+Yklvc/ulsrHHo1wtZn/qtmUIttKGAr79dgw8eTvI02kfN/+NsRE8Scd3bB -rrcCaoF6qUWD4gXmuVbBlDePSHFjIuwXZQeVikvfj8ZaCuWw419eaxGrDPmF60Tp -+ARz8un+XJiM9XOva7R+zdRcAitMOeGylZUtQofX1bOQQ7dsE/He3fbE+Ik/0XX1 -ksOR1YqI0JDs3G3eicJlcZaLDQP9nL9bFqyS2+r+eXyt66/3FsvbzSUr5R/7mp/i -Ucw6UwxI5g69ybR2BlLmEROFcmMDBOAENisgGQLodKcftslWZvB1JdxnwQ5hYIiz -PtGo/KPaHbDRsSNU30R2be1B2MGyIrZTHN81Hdyhdyox5C315eXbyOD/5YDXC2Og -/zOhD7osFRXql7PSorW+8oyWHhqPHWykYTe5hnMz15eWniN9gqRMgeKh0bpnX5UH -oycR7hYQe7xFSkyyBNKr79X9DFHOUGoIMfmR2gyPZFwDwzqLID9ujWc9Otb+fVuI -yV77zGHcizN300QyNQliBJIWENieJ0f7OyHj+OsdWwIDAQABo4GwMIGtMA8GA1Ud -EwEB/wQFMAMBAf8wCwYDVR0PBAQDAgEGMB0GA1UdDgQWBBQahGK8SEwzJQTU7tD2 -A8QZRtGUazBuBgNVHSMEZzBlgBQahGK8SEwzJQTU7tD2A8QZRtGUa6FJpEcwRTEL -MAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMT -ElF1b1ZhZGlzIFJvb3QgQ0EgMoICBQkwDQYJKoZIhvcNAQEFBQADggIBAD4KFk2f -BluornFdLwUvZ+YTRYPENvbzwCYMDbVHZF34tHLJRqUDGCdViXh9duqWNIAXINzn -g/iN/Ae42l9NLmeyhP3ZRPx3UIHmfLTJDQtyU/h2BwdBR5YM++CCJpNVjP4iH2Bl -fF/nJrP3MpCYUNQ3cVX2kiF495V5+vgtJodmVjB3pjd4M1IQWK4/YY7yarHvGH5K -WWPKjaJW1acvvFYfzznB4vsKqBUsfU16Y8Zsl0Q80m/DShcK+JDSV6IZUaUtl0Ha -B0+pUNqQjZRG4T7wlP0QADj1O+hA4bRuVhogzG9Yje0uRY/W6ZM/57Es3zrWIozc -hLsib9D45MY56QSIPMO661V6bYCZJPVsAfv4l7CUW+v90m/xd2gNNWQjrLhVoQPR -TUIZ3Ph1WVaj+ahJefivDrkRoHy3au000LYmYjgahwz46P0u05B/B5EqHdZ+XIWD -mbA4CD/pXvk1B+TJYm5Xf6dQlfe6yJvmjqIBxdZmv3lh8zwc4bmCXF2gw+nYSL0Z -ohEUGW6yhhtoPkg3Goi3XZZenMfvJ2II4pEZXNLxId26F0KCl3GBUzGpn/Z9Yr9y -4aOTHcyKJloJONDO1w2AFrR4pTqHTI2KpdVGl/IsELm8VCLAAVBpQ570su9t+Oza -8eOx79+Rj1QqCyXBJhnEUhAFZdWCEOrCMc0u ------END CERTIFICATE----- - -# Issuer: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Subject: CN=QuoVadis Root CA 3 O=QuoVadis Limited -# Label: "QuoVadis Root CA 3" -# Serial: 1478 -# MD5 Fingerprint: 31:85:3c:62:94:97:63:b9:aa:fd:89:4e:af:6f:e0:cf -# SHA1 Fingerprint: 1f:49:14:f7:d8:74:95:1d:dd:ae:02:c0:be:fd:3a:2d:82:75:51:85 -# SHA256 Fingerprint: 18:f1:fc:7f:20:5d:f8:ad:dd:eb:7f:e0:07:dd:57:e3:af:37:5a:9c:4d:8d:73:54:6b:f4:f1:fe:d1:e1:8d:35 ------BEGIN CERTIFICATE----- -MIIGnTCCBIWgAwIBAgICBcYwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQk0x -GTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxGzAZBgNVBAMTElF1b1ZhZGlzIFJv 
-b3QgQ0EgMzAeFw0wNjExMjQxOTExMjNaFw0zMTExMjQxOTA2NDRaMEUxCzAJBgNV -BAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBMaW1pdGVkMRswGQYDVQQDExJRdW9W -YWRpcyBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDM -V0IWVJzmmNPTTe7+7cefQzlKZbPoFog02w1ZkXTPkrgEQK0CSzGrvI2RaNggDhoB -4hp7Thdd4oq3P5kazethq8Jlph+3t723j/z9cI8LoGe+AaJZz3HmDyl2/7FWeUUr -H556VOijKTVopAFPD6QuN+8bv+OPEKhyq1hX51SGyMnzW9os2l2ObjyjPtr7guXd -8lyyBTNvijbO0BNO/79KDDRMpsMhvVAEVeuxu537RR5kFd5VAYwCdrXLoT9Cabwv -vWhDFlaJKjdhkf2mrk7AyxRllDdLkgbvBNDInIjbC3uBr7E9KsRlOni27tyAsdLT -mZw67mtaa7ONt9XOnMK+pUsvFrGeaDsGb659n/je7Mwpp5ijJUMv7/FfJuGITfhe -btfZFG4ZM2mnO4SJk8RTVROhUXhA+LjJou57ulJCg54U7QVSWllWp5f8nT8KKdjc -T5EOE7zelaTfi5m+rJsziO+1ga8bxiJTyPbH7pcUsMV8eFLI8M5ud2CEpukqdiDt -WAEXMJPpGovgc2PZapKUSU60rUqFxKMiMPwJ7Wgic6aIDFUhWMXhOp8q3crhkODZ -c6tsgLjoC2SToJyMGf+z0gzskSaHirOi4XCPLArlzW1oUevaPwV/izLmE1xr/l9A -4iLItLRkT9a6fUg+qGkM17uGcclzuD87nSVL2v9A6wIDAQABo4IBlTCCAZEwDwYD -VR0TAQH/BAUwAwEB/zCB4QYDVR0gBIHZMIHWMIHTBgkrBgEEAb5YAAMwgcUwgZMG -CCsGAQUFBwICMIGGGoGDQW55IHVzZSBvZiB0aGlzIENlcnRpZmljYXRlIGNvbnN0 -aXR1dGVzIGFjY2VwdGFuY2Ugb2YgdGhlIFF1b1ZhZGlzIFJvb3QgQ0EgMyBDZXJ0 -aWZpY2F0ZSBQb2xpY3kgLyBDZXJ0aWZpY2F0aW9uIFByYWN0aWNlIFN0YXRlbWVu -dC4wLQYIKwYBBQUHAgEWIWh0dHA6Ly93d3cucXVvdmFkaXNnbG9iYWwuY29tL2Nw -czALBgNVHQ8EBAMCAQYwHQYDVR0OBBYEFPLAE+CCQz777i9nMpY1XNu4ywLQMG4G -A1UdIwRnMGWAFPLAE+CCQz777i9nMpY1XNu4ywLQoUmkRzBFMQswCQYDVQQGEwJC -TTEZMBcGA1UEChMQUXVvVmFkaXMgTGltaXRlZDEbMBkGA1UEAxMSUXVvVmFkaXMg -Um9vdCBDQSAzggIFxjANBgkqhkiG9w0BAQUFAAOCAgEAT62gLEz6wPJv92ZVqyM0 -7ucp2sNbtrCD2dDQ4iH782CnO11gUyeim/YIIirnv6By5ZwkajGxkHon24QRiSem -d1o417+shvzuXYO8BsbRd2sPbSQvS3pspweWyuOEn62Iix2rFo1bZhfZFvSLgNLd -+LJ2w/w4E6oM3kJpK27zPOuAJ9v1pkQNn1pVWQvVDVJIxa6f8i+AxeoyUDUSly7B -4f/xI4hROJ/yZlZ25w9Rl6VSDE1JUZU2Pb+iSwwQHYaZTKrzchGT5Or2m9qoXadN -t54CrnMAyNojA+j56hl0YgCUyyIgvpSnWbWCar6ZeXqp8kokUvd0/bpO5qgdAm6x -DYBEwa7TIzdfu4V8K5Iu6H6li92Z4b8nby1dqnuH/grdS/yO9SbkbnBCbjPsMZ57 -k8HkyWkaPcBrTiJt7qtYTcbQQcEr6k8Sh17rRdhs9ZgC06DYVYoGmRmioHfRMJ6s -zHXug/WwYjnPbFfiTNKRCw51KBuav/0aQ/HKd/s7j2G4aSgWQgRecCocIdiP4b0j -Wy10QJLZYxkNc91pvGJHvOB0K7Lrfb5BG7XARsWhIstfTsEokt4YutUqKLsRixeT -mJlglFwjz1onl14LBQaTNx47aTbrqZ5hHY8y2o4M1nQ+ewkk2gF3R8Q7zTSMmfXK -4SVhM7JZG+Ju1zdXtg2pEto= ------END CERTIFICATE----- - -# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com -# Label: "XRamp Global CA Root" -# Serial: 107108908803651509692980124233745014957 -# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 -# SHA1 Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 -# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 ------BEGIN CERTIFICATE----- -MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB -gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk -MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY -UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx -NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 -dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy -dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 -38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP -KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q -DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 
-qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa -JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi -PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P -BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs -jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 -eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD -ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR -vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt -qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa -IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy -i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ -O+7ETPTsJ3xCwnR8gooJybQDJbw= ------END CERTIFICATE----- - -# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Subject: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority -# Label: "Go Daddy Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 -# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 -# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 ------BEGIN CERTIFICATE----- -MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh -MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE -YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 -MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo -ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg -MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN -ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA -PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w -wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi -EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY -avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ -YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE -sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h -/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 -IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj -YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD -ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy -OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P -TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ -HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER -dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf -ReYNnyicsbkqWletNw+vHX/bvZ8= ------END CERTIFICATE----- - -# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority -# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority -# Label: "Starfield Class 2 CA" -# Serial: 0 -# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 -# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a -# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 ------BEGIN CERTIFICATE----- -MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl -MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp -U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw -NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE -ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp -ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 -DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf -8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN -+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 -X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa -K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA -1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G -A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR -zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 -YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD -bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w -DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 -L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D -eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl -xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp -VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY -WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root CA" -# Serial: 17154717934120587862167794914071425081 -# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 -# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 -# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c ------BEGIN CERTIFICATE----- -MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c -JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP -mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ -wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 -VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ -AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB -AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun -pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC -dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf -fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm 
-NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx -H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe -+o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root CA" -# Serial: 10944719598952040374951832963794454346 -# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e -# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 -# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 ------BEGIN CERTIFICATE----- -MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD -QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB -CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 -nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt -43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P -T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 -gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR -TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw -DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr -hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg -06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF -PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls -YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk -CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert High Assurance EV Root CA" -# Serial: 3553400076410547919724730734378100087 -# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a -# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 -# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j -ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL -MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 -LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug -RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm -+9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW -PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM -xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB -Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 -hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg -EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF 
-MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA -FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec -nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z -eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF -hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 -Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe -vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep -+OkuE6N36B9K ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Gold CA - G2 O=SwissSign AG -# Label: "SwissSign Gold CA - G2" -# Serial: 13492815561806991280 -# MD5 Fingerprint: 24:77:d9:a8:91:d1:3b:fa:88:2d:c2:ff:f8:cd:33:93 -# SHA1 Fingerprint: d8:c5:38:8a:b7:30:1b:1b:6e:d4:7a:e6:45:25:3a:6f:9f:1a:27:61 -# SHA256 Fingerprint: 62:dd:0b:e9:b9:f5:0a:16:3e:a0:f8:e7:5c:05:3b:1e:ca:57:ea:55:c8:68:8f:64:7c:68:81:f2:c8:35:7b:95 ------BEGIN CERTIFICATE----- -MIIFujCCA6KgAwIBAgIJALtAHEP1Xk+wMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV -BAYTAkNIMRUwEwYDVQQKEwxTd2lzc1NpZ24gQUcxHzAdBgNVBAMTFlN3aXNzU2ln -biBHb2xkIENBIC0gRzIwHhcNMDYxMDI1MDgzMDM1WhcNMzYxMDI1MDgzMDM1WjBF -MQswCQYDVQQGEwJDSDEVMBMGA1UEChMMU3dpc3NTaWduIEFHMR8wHQYDVQQDExZT -d2lzc1NpZ24gR29sZCBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEAr+TufoskDhJuqVAtFkQ7kpJcyrhdhJJCEyq8ZVeCQD5XJM1QiyUqt2/8 -76LQwB8CJEoTlo8jE+YoWACjR8cGp4QjK7u9lit/VcyLwVcfDmJlD909Vopz2q5+ -bbqBHH5CjCA12UNNhPqE21Is8w4ndwtrvxEvcnifLtg+5hg3Wipy+dpikJKVyh+c -6bM8K8vzARO/Ws/BtQpgvd21mWRTuKCWs2/iJneRjOBiEAKfNA+k1ZIzUd6+jbqE -emA8atufK+ze3gE/bk3lUIbLtK/tREDFylqM2tIrfKjuvqblCqoOpd8FUrdVxyJd -MmqXl2MT28nbeTZ7hTpKxVKJ+STnnXepgv9VHKVxaSvRAiTysybUa9oEVeXBCsdt -MDeQKuSeFDNeFhdVxVu1yzSJkvGdJo+hB9TGsnhQ2wwMC3wLjEHXuendjIj3o02y -MszYF9rNt85mndT9Xv+9lz4pded+p2JYryU0pUHHPbwNUMoDAw8IWh+Vc3hiv69y -FGkOpeUDDniOJihC8AcLYiAQZzlG+qkDzAQ4embvIIO1jEpWjpEA/I5cgt6IoMPi -aG59je883WX0XaxR7ySArqpWl2/5rX3aYT+YdzylkbYcjCbaZaIJbcHiVOO5ykxM -gI93e2CaHt+28kgeDrpOVG2Y4OGiGqJ3UM/EY5LsRxmd6+ZrzsECAwEAAaOBrDCB -qTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUWyV7 -lqRlUX64OfPAeGZe6Drn8O4wHwYDVR0jBBgwFoAUWyV7lqRlUX64OfPAeGZe6Drn -8O4wRgYDVR0gBD8wPTA7BglghXQBWQECAQEwLjAsBggrBgEFBQcCARYgaHR0cDov -L3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIBACe6 -45R88a7A3hfm5djV9VSwg/S7zV4Fe0+fdWavPOhWfvxyeDgD2StiGwC5+OlgzczO -UYrHUDFu4Up+GC9pWbY9ZIEr44OE5iKHjn3g7gKZYbge9LgriBIWhMIxkziWMaa5 -O1M/wySTVltpkuzFwbs4AOPsF6m43Md8AYOfMke6UiI0HTJ6CVanfCU2qT1L2sCC -bwq7EsiHSycR+R4tx5M/nttfJmtS2S6K8RTGRI0Vqbe/vd6mGu6uLftIdxf+u+yv -GPUqUfA5hJeVbG4bwyvEdGB5JbAKJ9/fXtI5z0V9QkvfsywexcZdylU6oJxpmo/a -77KwPJ+HbBIrZXAVUjEaJM9vMSNQH4xPjyPDdEFjHFWoFN0+4FFQz/EbMFYOkrCC -hdiDyyJkvC24JdVUorgG6q2SpCSgwYa1ShNqR88uC1aVVMvOmttqtKay20EIhid3 -92qgQmwLOM7XdVAyksLfKzAiSNDVQTglXaTpXZ/GlHXQRf0wl0OPkKsKx4ZzYEpp -Ld6leNcG2mqeSz53OiATIgHQv2ieY2BrNU0LbbqhPcCT4H8js1WtciVORvnSFu+w -ZMEBnunKoGqYDs/YYPIvSbjkQuE4NRb0yG5P94FW6LqjviOvrv1vA+ACOzB2+htt -Qc8Bsem4yWb02ybzOqR08kkkW8mw0FfB+j564ZfJ ------END CERTIFICATE----- - -# Issuer: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Subject: CN=SwissSign Silver CA - G2 O=SwissSign AG -# Label: "SwissSign Silver CA - G2" -# Serial: 5700383053117599563 -# MD5 Fingerprint: e0:06:a1:c9:7d:cf:c9:fc:0d:c0:56:75:96:d8:62:13 -# SHA1 Fingerprint: 9b:aa:e5:9f:56:ee:21:cb:43:5a:be:25:93:df:a7:f0:40:d1:1d:cb -# SHA256 Fingerprint: be:6c:4d:a2:bb:b9:ba:59:b6:f3:93:97:68:37:42:46:c3:c0:05:99:3f:a9:8f:02:0d:1d:ed:be:d4:8a:81:d5 ------BEGIN CERTIFICATE----- 
-MIIFvTCCA6WgAwIBAgIITxvUL1S7L0swDQYJKoZIhvcNAQEFBQAwRzELMAkGA1UE -BhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMYU3dpc3NTaWdu -IFNpbHZlciBDQSAtIEcyMB4XDTA2MTAyNTA4MzI0NloXDTM2MTAyNTA4MzI0Nlow -RzELMAkGA1UEBhMCQ0gxFTATBgNVBAoTDFN3aXNzU2lnbiBBRzEhMB8GA1UEAxMY -U3dpc3NTaWduIFNpbHZlciBDQSAtIEcyMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A -MIICCgKCAgEAxPGHf9N4Mfc4yfjDmUO8x/e8N+dOcbpLj6VzHVxumK4DV644N0Mv -Fz0fyM5oEMF4rhkDKxD6LHmD9ui5aLlV8gREpzn5/ASLHvGiTSf5YXu6t+WiE7br -YT7QbNHm+/pe7R20nqA1W6GSy/BJkv6FCgU+5tkL4k+73JU3/JHpMjUi0R86TieF -nbAVlDLaYQ1HTWBCrpJH6INaUFjpiou5XaHc3ZlKHzZnu0jkg7Y360g6rw9njxcH -6ATK72oxh9TAtvmUcXtnZLi2kUpCe2UuMGoM9ZDulebyzYLs2aFK7PayS+VFheZt -eJMELpyCbTapxDFkH4aDCyr0NQp4yVXPQbBH6TCfmb5hqAaEuSh6XzjZG6k4sIN/ -c8HDO0gqgg8hm7jMqDXDhBuDsz6+pJVpATqJAHgE2cn0mRmrVn5bi4Y5FZGkECwJ -MoBgs5PAKrYYC51+jUnyEEp/+dVGLxmSo5mnJqy7jDzmDrxHB9xzUfFwZC8I+bRH -HTBsROopN4WSaGa8gzj+ezku01DwH/teYLappvonQfGbGHLy9YR0SslnxFSuSGTf -jNFusB3hB48IHpmccelM2KX3RxIfdNFRnobzwqIjQAtz20um53MGjMGg6cFZrEb6 -5i/4z3GcRm25xBWNOHkDRUjvxF3XCO6HOSKGsg0PWEP3calILv3q1h8CAwEAAaOB -rDCBqTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -F6DNweRBtjpbO8tFnb0cwpj6hlgwHwYDVR0jBBgwFoAUF6DNweRBtjpbO8tFnb0c -wpj6hlgwRgYDVR0gBD8wPTA7BglghXQBWQEDAQEwLjAsBggrBgEFBQcCARYgaHR0 -cDovL3JlcG9zaXRvcnkuc3dpc3NzaWduLmNvbS8wDQYJKoZIhvcNAQEFBQADggIB -AHPGgeAn0i0P4JUw4ppBf1AsX19iYamGamkYDHRJ1l2E6kFSGG9YrVBWIGrGvShp -WJHckRE1qTodvBqlYJ7YH39FkWnZfrt4csEGDyrOj4VwYaygzQu4OSlWhDJOhrs9 -xCrZ1x9y7v5RoSJBsXECYxqCsGKrXlcSH9/L3XWgwF15kIwb4FDm3jH+mHtwX6WQ -2K34ArZv02DdQEsixT2tOnqfGhpHkXkzuoLcMmkDlm4fS/Bx/uNncqCxv1yL5PqZ -IseEuRuNI5c/7SXgz2W79WEE790eslpBIlqhn10s6FvJbakMDHiqYMZWjwFaDGi8 -aRl5xB9+lwW/xekkUV7U1UtT7dkjWjYDZaPBA61BMPNGG4WQr2W11bHkFlt4dR2X -em1ZqSqPe97Dh4kQmUlzeMg9vVE1dCrV8X5pGyq7O70luJpaPXJhkGaH7gzWTdQR -dAtq/gsD/KNVV4n+SsuuWxcFyPKNIzFTONItaj+CuY0IavdeQXRuwxF+B6wpYJE/ -OMpXEA29MC/HpeZBoNquBYeaoKRlbEwJDIm6uNO5wJOKMPqN5ZprFQFOZ6raYlY+ -hAhm0sQ2fac+EPyI4NSA5QC9qvNOBqN6avlicuMJT+ubDgEj8Z+7fNzcbBGXJbLy -tGMU0gYqZ4yD9c7qB9iaah7s5Aq7KkzrCWA5zspi2C5u ------END CERTIFICATE----- - -# Issuer: CN=SecureTrust CA O=SecureTrust Corporation -# Subject: CN=SecureTrust CA O=SecureTrust Corporation -# Label: "SecureTrust CA" -# Serial: 17199774589125277788362757014266862032 -# MD5 Fingerprint: dc:32:c3:a7:6d:25:57:c7:68:09:9d:ea:2d:a9:a2:d1 -# SHA1 Fingerprint: 87:82:c6:c3:04:35:3b:cf:d2:96:92:d2:59:3e:7d:44:d9:34:ff:11 -# SHA256 Fingerprint: f1:c1:b5:0a:e5:a2:0d:d8:03:0e:c9:f6:bc:24:82:3d:d3:67:b5:25:57:59:b4:e7:1b:61:fc:e9:f7:37:5d:73 ------BEGIN CERTIFICATE----- -MIIDuDCCAqCgAwIBAgIQDPCOXAgWpa1Cf/DrJxhZ0DANBgkqhkiG9w0BAQUFADBI -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -FzAVBgNVBAMTDlNlY3VyZVRydXN0IENBMB4XDTA2MTEwNzE5MzExOFoXDTI5MTIz -MTE5NDA1NVowSDELMAkGA1UEBhMCVVMxIDAeBgNVBAoTF1NlY3VyZVRydXN0IENv -cnBvcmF0aW9uMRcwFQYDVQQDEw5TZWN1cmVUcnVzdCBDQTCCASIwDQYJKoZIhvcN -AQEBBQADggEPADCCAQoCggEBAKukgeWVzfX2FI7CT8rU4niVWJxB4Q2ZQCQXOZEz -Zum+4YOvYlyJ0fwkW2Gz4BERQRwdbvC4u/jep4G6pkjGnx29vo6pQT64lO0pGtSO -0gMdA+9tDWccV9cGrcrI9f4Or2YlSASWC12juhbDCE/RRvgUXPLIXgGZbf2IzIao -wW8xQmxSPmjL8xk037uHGFaAJsTQ3MBv396gwpEWoGQRS0S8Hvbn+mPeZqx2pHGj -7DaUaHp3pLHnDi+BeuK1cobvomuL8A/b01k/unK8RCSc43Oz969XL0Imnal0ugBS -8kvNU3xHCzaFDmapCJcWNFfBZveA4+1wVMeT4C4oFVmHursCAwEAAaOBnTCBmjAT -BgkrBgEEAYI3FAIEBh4EAEMAQTALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQUQjK2FvoE/f5dS3rD/fdMQB1aQ68wNAYDVR0fBC0wKzApoCeg -JYYjaHR0cDovL2NybC5zZWN1cmV0cnVzdC5jb20vU1RDQS5jcmwwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEFBQADggEBADDtT0rhWDpSclu1pqNlGKa7UTt3 
-6Z3q059c4EVlew3KW+JwULKUBRSuSceNQQcSc5R+DCMh/bwQf2AQWnL1mA6s7Ll/ -3XpvXdMc9P+IBWlCqQVxyLesJugutIxq/3HcuLHfmbx8IVQr5Fiiu1cprp6poxkm -D5kuCLDv/WnPmRoJjeOnnyvJNjR7JLN4TJUXpAYmHrZkUjZfYGfZnMUFdAvnZyPS -CPyI6a6Lf+Ew9Dd+/cYy2i2eRDAwbO4H3tI0/NL/QPZL9GZGBlSm8jIKYyYwa5vR -3ItHuuG51WLQoqD0ZwV4KWMabwTW+MZMo5qxN7SN5ShLHZ4swrhovO0C7jE= ------END CERTIFICATE----- - -# Issuer: CN=Secure Global CA O=SecureTrust Corporation -# Subject: CN=Secure Global CA O=SecureTrust Corporation -# Label: "Secure Global CA" -# Serial: 9751836167731051554232119481456978597 -# MD5 Fingerprint: cf:f4:27:0d:d4:ed:dc:65:16:49:6d:3d:da:bf:6e:de -# SHA1 Fingerprint: 3a:44:73:5a:e5:81:90:1f:24:86:61:46:1e:3b:9c:c4:5f:f5:3a:1b -# SHA256 Fingerprint: 42:00:f5:04:3a:c8:59:0e:bb:52:7d:20:9e:d1:50:30:29:fb:cb:d4:1c:a1:b5:06:ec:27:f1:5a:de:7d:ac:69 ------BEGIN CERTIFICATE----- -MIIDvDCCAqSgAwIBAgIQB1YipOjUiolN9BPI8PjqpTANBgkqhkiG9w0BAQUFADBK -MQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3QgQ29ycG9yYXRpb24x -GTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwHhcNMDYxMTA3MTk0MjI4WhcNMjkx -MjMxMTk1MjA2WjBKMQswCQYDVQQGEwJVUzEgMB4GA1UEChMXU2VjdXJlVHJ1c3Qg -Q29ycG9yYXRpb24xGTAXBgNVBAMTEFNlY3VyZSBHbG9iYWwgQ0EwggEiMA0GCSqG -SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvNS7YrGxVaQZx5RNoJLNP2MwhR/jxYDiJ -iQPpvepeRlMJ3Fz1Wuj3RSoC6zFh1ykzTM7HfAo3fg+6MpjhHZevj8fcyTiW89sa -/FHtaMbQbqR8JNGuQsiWUGMu4P51/pinX0kuleM5M2SOHqRfkNJnPLLZ/kG5VacJ -jnIFHovdRIWCQtBJwB1g8NEXLJXr9qXBkqPFwqcIYA1gBBCWeZ4WNOaptvolRTnI -HmX5k/Wq8VLcmZg9pYYaDDUz+kulBAYVHDGA76oYa8J719rO+TMg1fW9ajMtgQT7 -sFzUnKPiXB3jqUJ1XnvUd+85VLrJChgbEplJL4hL/VBi0XPnj3pDAgMBAAGjgZ0w -gZowEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFK9EBMJBfkiD2045AuzshHrmzsmkMDQGA1UdHwQtMCsw -KaAnoCWGI2h0dHA6Ly9jcmwuc2VjdXJldHJ1c3QuY29tL1NHQ0EuY3JsMBAGCSsG -AQQBgjcVAQQDAgEAMA0GCSqGSIb3DQEBBQUAA4IBAQBjGghAfaReUw132HquHw0L -URYD7xh8yOOvaliTFGCRsoTciE6+OYo68+aCiV0BN7OrJKQVDpI1WkpEXk5X+nXO -H0jOZvQ8QCaSmGwb7iRGDBezUqXbpZGRzzfTb+cnCDpOGR86p1hcF895P4vkp9Mm -I50mD1hp/Ed+stCNi5O/KU9DaXR2Z0vPB4zmAve14bRDtUstFJ/53CYNv6ZHdAbY -iNE6KTCEztI5gGIbqMdXSbxqVVFnFUq+NQfk1XWYN3kwFNspnWzFacxHVaIw98xc -f8LDmBxrThaA63p4ZUWiABqvDA1VZDRIuJK58bRQKfJPIx/abKwfROHdI3hRW8cW ------END CERTIFICATE----- - -# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO Certification Authority O=COMODO CA Limited -# Label: "COMODO Certification Authority" -# Serial: 104350513648249232941998508985834464573 -# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 -# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b -# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 ------BEGIN CERTIFICATE----- -MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB -gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G -A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV -BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw -MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl -YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P -RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 -aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 -UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI -2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 -Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp -+2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ 
-DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O -nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW -/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g -PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u -QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY -SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv -IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ -RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 -zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd -BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB -ZQ== ------END CERTIFICATE----- - -# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited -# Label: "COMODO ECC Certification Authority" -# Serial: 41578283867086692638256921589707938090 -# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 -# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 -# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 ------BEGIN CERTIFICATE----- -MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL -MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE -BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT -IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw -MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy -ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N -T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv -biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR -FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J -cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW -BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ -BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm -fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv -GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= ------END CERTIFICATE----- - -# Issuer: CN=Certigna O=Dhimyotis -# Subject: CN=Certigna O=Dhimyotis -# Label: "Certigna" -# Serial: 18364802974209362175 -# MD5 Fingerprint: ab:57:a6:5b:7d:42:82:19:b5:d8:58:26:28:5e:fd:ff -# SHA1 Fingerprint: b1:2e:13:63:45:86:a4:6f:1a:b2:60:68:37:58:2d:c4:ac:fd:94:97 -# SHA256 Fingerprint: e3:b6:a2:db:2e:d7:ce:48:84:2f:7a:c5:32:41:c7:b7:1d:54:14:4b:fb:40:c1:1f:3f:1d:0b:42:f5:ee:a1:2d ------BEGIN CERTIFICATE----- -MIIDqDCCApCgAwIBAgIJAP7c4wEPyUj/MA0GCSqGSIb3DQEBBQUAMDQxCzAJBgNV -BAYTAkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hMB4X -DTA3MDYyOTE1MTMwNVoXDTI3MDYyOTE1MTMwNVowNDELMAkGA1UEBhMCRlIxEjAQ -BgNVBAoMCURoaW15b3RpczERMA8GA1UEAwwIQ2VydGlnbmEwggEiMA0GCSqGSIb3 -DQEBAQUAA4IBDwAwggEKAoIBAQDIaPHJ1tazNHUmgh7stL7qXOEm7RFHYeGifBZ4 -QCHkYJ5ayGPhxLGWkv8YbWkj4Sti993iNi+RB7lIzw7sebYs5zRLcAglozyHGxny -gQcPOJAZ0xH+hrTy0V4eHpbNgGzOOzGTtvKg0KmVEn2lmsxryIRWijOp5yIVUxbw -zBfsV1/pogqYCd7jX5xv3EjjhQsVWqa6n6xI4wmy9/Qy3l40vhx4XUJbzg4ij02Q -130yGLMLLGq/jj8UEYkgDncUtT2UCIf3JR7VsmAA7G8qKCVuKj4YYxclPz5EIBb2 -JsglrgVKtOdjLPOMFlN+XPsRGgjBRmKfIrjxwo1p3Po6WAbfAgMBAAGjgbwwgbkw -DwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUGu3+QTmQtCRZvgHyUtVF9lo53BEw -ZAYDVR0jBF0wW4AUGu3+QTmQtCRZvgHyUtVF9lo53BGhOKQ2MDQxCzAJBgNVBAYT -AkZSMRIwEAYDVQQKDAlEaGlteW90aXMxETAPBgNVBAMMCENlcnRpZ25hggkA/tzj -AQ/JSP8wDgYDVR0PAQH/BAQDAgEGMBEGCWCGSAGG+EIBAQQEAwIABzANBgkqhkiG 
-9w0BAQUFAAOCAQEAhQMeknH2Qq/ho2Ge6/PAD/Kl1NqV5ta+aDY9fm4fTIrv0Q8h -bV6lUmPOEvjvKtpv6zf+EwLHyzs+ImvaYS5/1HI93TDhHkxAGYwP15zRgzB7mFnc -fca5DClMoTOi62c6ZYTTluLtdkVwj7Ur3vkj1kluPBS1xp81HlDQwY9qcEQCYsuu -HWhBp6pX6FOqB9IG9tUUBguRA3UsbHK1YZWaDYu5Def131TN3ubY1gkIl2PlwS6w -t0QmwCbAr1UwnjvVNioZBPRcHv/PLLf/0P2HQBHVESO7SMAhqaQoLf0V+LBOK/Qw -WyH8EZE0vkHve52Xdf+XlcCWWC/qu0bXu+TZLg== ------END CERTIFICATE----- - -# Issuer: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Subject: O=Chunghwa Telecom Co., Ltd. OU=ePKI Root Certification Authority -# Label: "ePKI Root Certification Authority" -# Serial: 28956088682735189655030529057352760477 -# MD5 Fingerprint: 1b:2e:00:ca:26:06:90:3d:ad:fe:6f:15:68:d3:6b:b3 -# SHA1 Fingerprint: 67:65:0d:f1:7e:8e:7e:5b:82:40:a4:f4:56:4b:cf:e2:3d:69:c6:f0 -# SHA256 Fingerprint: c0:a6:f4:dc:63:a2:4b:fd:cf:54:ef:2a:6a:08:2a:0a:72:de:35:80:3e:2f:f5:ff:52:7a:e5:d8:72:06:df:d5 ------BEGIN CERTIFICATE----- -MIIFsDCCA5igAwIBAgIQFci9ZUdcr7iXAF7kBtK8nTANBgkqhkiG9w0BAQUFADBe -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xKjAoBgNVBAsMIWVQS0kgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe -Fw0wNDEyMjAwMjMxMjdaFw0zNDEyMjAwMjMxMjdaMF4xCzAJBgNVBAYTAlRXMSMw -IQYDVQQKDBpDaHVuZ2h3YSBUZWxlY29tIENvLiwgTHRkLjEqMCgGA1UECwwhZVBL -SSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA4SUP7o3biDN1Z82tH306Tm2d0y8U82N0ywEhajfqhFAH -SyZbCUNsIZ5qyNUD9WBpj8zwIuQf5/dqIjG3LBXy4P4AakP/h2XGtRrBp0xtInAh -ijHyl3SJCRImHJ7K2RKilTza6We/CKBk49ZCt0Xvl/T29de1ShUCWH2YWEtgvM3X -DZoTM1PRYfl61dd4s5oz9wCGzh1NlDivqOx4UXCKXBCDUSH3ET00hl7lSM2XgYI1 -TBnsZfZrxQWh7kcT1rMhJ5QQCtkkO7q+RBNGMD+XPNjX12ruOzjjK9SXDrkb5wdJ -fzcq+Xd4z1TtW0ado4AOkUPB1ltfFLqfpo0kR0BZv3I4sjZsN/+Z0V0OWQqraffA -sgRFelQArr5T9rXn4fg8ozHSqf4hUmTFpmfwdQcGlBSBVcYn5AGPF8Fqcde+S/uU -WH1+ETOxQvdibBjWzwloPn9s9h6PYq2lY9sJpx8iQkEeb5mKPtf5P0B6ebClAZLS -nT0IFaUQAS2zMnaolQ2zepr7BxB4EW/hj8e6DyUadCrlHJhBmd8hh+iVBmoKs2pH -dmX2Os+PYhcZewoozRrSgx4hxyy/vv9haLdnG7t4TY3OZ+XkwY63I2binZB1NJip -NiuKmpS5nezMirH4JYlcWrYvjB9teSSnUmjDhDXiZo1jDiVN1Rmy5nk3pyKdVDEC -AwEAAaNqMGgwHQYDVR0OBBYEFB4M97Zn8uGSJglFwFU5Lnc/QkqiMAwGA1UdEwQF -MAMBAf8wOQYEZyoHAAQxMC8wLQIBADAJBgUrDgMCGgUAMAcGBWcqAwAABBRFsMLH -ClZ87lt4DJX5GFPBphzYEDANBgkqhkiG9w0BAQUFAAOCAgEACbODU1kBPpVJufGB -uvl2ICO1J2B01GqZNF5sAFPZn/KmsSQHRGoqxqWOeBLoR9lYGxMqXnmbnwoqZ6Yl -PwZpVnPDimZI+ymBV3QGypzqKOg4ZyYr8dW1P2WT+DZdjo2NQCCHGervJ8A9tDkP -JXtoUHRVnAxZfVo9QZQlUgjgRywVMRnVvwdVxrsStZf0X4OFunHB2WyBEXYKCrC/ -gpf36j36+uwtqSiUO1bd0lEursC9CBWMd1I0ltabrNMdjmEPNXubrjlpC2JgQCA2 -j6/7Nu4tCEoduL+bXPjqpRugc6bY+G7gMwRfaKonh+3ZwZCc7b3jajWvY9+rGNm6 -5ulK6lCKD2GTHuItGeIwlDWSXQ62B68ZgI9HkFFLLk3dheLSClIKF5r8GrBQAuUB -o2M3IUxExJtRmREOc5wGj1QupyheRDmHVi03vYVElOEMSyycw5KFNGHLD7ibSkNS -/jQ6fbjpKdx2qcgw+BRxgMYeNkh0IkFch4LoGHGLQYlE535YW6i4jRPpp2zDR+2z -Gp1iro2C6pSe3VkQw63d4k3jMdXH7OjysP6SHhYKGvzZ8/gntsm+HbRsZJB/9OTE -W9c3rkIO3aQab3yIVMUWbuF6aC74Or8NpDyJO3inTmODBCEIZ43ygknQW/2xzQ+D -hNQ+IIX3Sj0rnP0qCglN6oH4EZw= ------END CERTIFICATE----- - -# Issuer: O=certSIGN OU=certSIGN ROOT CA -# Subject: O=certSIGN OU=certSIGN ROOT CA -# Label: "certSIGN ROOT CA" -# Serial: 35210227249154 -# MD5 Fingerprint: 18:98:c0:d6:e9:3a:fc:f9:b0:f5:0c:f7:4b:01:44:17 -# SHA1 Fingerprint: fa:b7:ee:36:97:26:62:fb:2d:b0:2a:f6:bf:03:fd:e8:7c:4b:2f:9b -# SHA256 Fingerprint: ea:a9:62:c4:fa:4a:6b:af:eb:e4:15:19:6d:35:1c:cd:88:8d:4f:53:f3:fa:8a:e6:d7:c4:66:a9:4e:60:42:bb ------BEGIN CERTIFICATE----- -MIIDODCCAiCgAwIBAgIGIAYFFnACMA0GCSqGSIb3DQEBBQUAMDsxCzAJBgNVBAYT 
-AlJPMREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBD -QTAeFw0wNjA3MDQxNzIwMDRaFw0zMTA3MDQxNzIwMDRaMDsxCzAJBgNVBAYTAlJP -MREwDwYDVQQKEwhjZXJ0U0lHTjEZMBcGA1UECxMQY2VydFNJR04gUk9PVCBDQTCC -ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALczuX7IJUqOtdu0KBuqV5Do -0SLTZLrTk+jUrIZhQGpgV2hUhE28alQCBf/fm5oqrl0Hj0rDKH/v+yv6efHHrfAQ -UySQi2bJqIirr1qjAOm+ukbuW3N7LBeCgV5iLKECZbO9xSsAfsT8AzNXDe3i+s5d -RdY4zTW2ssHQnIFKquSyAVwdj1+ZxLGt24gh65AIgoDzMKND5pCCrlUoSe1b16kQ -OA7+j0xbm0bqQfWwCHTD0IgztnzXdN/chNFDDnU5oSVAKOp4yw4sLjmdjItuFhwv -JoIQ4uNllAoEwF73XVv4EOLQunpL+943AAAaWyjj0pxzPjKHmKHJUS/X3qwzs08C -AwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAcYwHQYDVR0O -BBYEFOCMm9slSbPxfIbWskKHC9BroNnkMA0GCSqGSIb3DQEBBQUAA4IBAQA+0hyJ -LjX8+HXd5n9liPRyTMks1zJO890ZeUe9jjtbkw9QSSQTaxQGcu8J06Gh40CEyecY -MnQ8SG4Pn0vU9x7Tk4ZkVJdjclDVVc/6IJMCopvDI5NOFlV2oHB5bc0hH88vLbwZ -44gx+FkagQnIl6Z0x2DEW8xXjrJ1/RsCCdtZb3KTafcxQdaIOL+Hsr0Wefmq5L6I -Jd1hJyMctTEHBDa0GpC9oHRxUIltvBTjD4au8as+x6AJzKNI0eDbZOeStc+vckNw -i/nDhDwTqn6Sm1dTk/pwwpEOMfmbZ13pljheX7NzTogVZ96edhBiIL5VaZVDADlN -9u6wWk5JRFRYX0KD ------END CERTIFICATE----- - -# Issuer: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Subject: CN=NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny O=NetLock Kft. OU=Tan\xfas\xedtv\xe1nykiad\xf3k (Certification Services) -# Label: "NetLock Arany (Class Gold) F\u0151tan\xfas\xedtv\xe1ny" -# Serial: 80544274841616 -# MD5 Fingerprint: c5:a1:b7:ff:73:dd:d6:d7:34:32:18:df:fc:3c:ad:88 -# SHA1 Fingerprint: 06:08:3f:59:3f:15:a1:04:a0:69:a4:6b:a9:03:d0:06:b7:97:09:91 -# SHA256 Fingerprint: 6c:61:da:c3:a2:de:f0:31:50:6b:e0:36:d2:a6:fe:40:19:94:fb:d1:3d:f9:c8:d4:66:59:92:74:c4:46:ec:98 ------BEGIN CERTIFICATE----- -MIIEFTCCAv2gAwIBAgIGSUEs5AAQMA0GCSqGSIb3DQEBCwUAMIGnMQswCQYDVQQG -EwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFTATBgNVBAoMDE5ldExvY2sgS2Z0LjE3 -MDUGA1UECwwuVGFuw7pzw610dsOhbnlraWFkw7NrIChDZXJ0aWZpY2F0aW9uIFNl -cnZpY2VzKTE1MDMGA1UEAwwsTmV0TG9jayBBcmFueSAoQ2xhc3MgR29sZCkgRsWR -dGFuw7pzw610dsOhbnkwHhcNMDgxMjExMTUwODIxWhcNMjgxMjA2MTUwODIxWjCB -pzELMAkGA1UEBhMCSFUxETAPBgNVBAcMCEJ1ZGFwZXN0MRUwEwYDVQQKDAxOZXRM -b2NrIEtmdC4xNzA1BgNVBAsMLlRhbsO6c8OtdHbDoW55a2lhZMOzayAoQ2VydGlm -aWNhdGlvbiBTZXJ2aWNlcykxNTAzBgNVBAMMLE5ldExvY2sgQXJhbnkgKENsYXNz -IEdvbGQpIEbFkXRhbsO6c8OtdHbDoW55MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEAxCRec75LbRTDofTjl5Bu0jBFHjzuZ9lk4BqKf8owyoPjIMHj9DrT -lF8afFttvzBPhCf2nx9JvMaZCpDyD/V/Q4Q3Y1GLeqVw/HpYzY6b7cNGbIRwXdrz -AZAj/E4wqX7hJ2Pn7WQ8oLjJM2P+FpD/sLj916jAwJRDC7bVWaaeVtAkH3B5r9s5 -VA1lddkVQZQBr17s9o3x/61k/iCa11zr/qYfCGSji3ZVrR47KGAuhyXoqq8fxmRG -ILdwfzzeSNuWU7c5d+Qa4scWhHaXWy+7GRWF+GmF9ZmnqfI0p6m2pgP8b4Y9VHx2 -BJtr+UBdADTHLpl1neWIA6pN+APSQnbAGwIDAKiLo0UwQzASBgNVHRMBAf8ECDAG -AQH/AgEEMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUzPpnk/C2uNClwB7zU/2M -U9+D15YwDQYJKoZIhvcNAQELBQADggEBAKt/7hwWqZw8UQCgwBEIBaeZ5m8BiFRh -bvG5GK1Krf6BQCOUL/t1fC8oS2IkgYIL9WHxHG64YTjrgfpioTtaYtOUZcTh5m2C -+C8lcLIhJsFyUR+MLMOEkMNaj7rP9KdlpeuY0fsFskZ1FSNqb4VjMIDw1Z4fKRzC -bLBQWV2QWzuoDTDPv31/zvGdg73JRm4gpvlhUbohL3u+pRVjodSVh/GeufOJ8z2F -uLjbvrW5KfnaNwUASZQDhETnv0Mxz3WLJdH0pmT1kvarBes96aULNmLazAZfNou2 -XjG4Kvte9nHfRCaexOYNkbQudZWAUWpLMKawYqGT8ZvYzsRjdT9ZR7E= ------END CERTIFICATE----- - -# Issuer: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. -# Subject: CN=SecureSign RootCA11 O=Japan Certification Services, Inc. 
-# Label: "SecureSign RootCA11" -# Serial: 1 -# MD5 Fingerprint: b7:52:74:e2:92:b4:80:93:f2:75:e4:cc:d7:f2:ea:26 -# SHA1 Fingerprint: 3b:c4:9f:48:f8:f3:73:a0:9c:1e:bd:f8:5b:b1:c3:65:c7:d8:11:b3 -# SHA256 Fingerprint: bf:0f:ee:fb:9e:3a:58:1a:d5:f9:e9:db:75:89:98:57:43:d2:61:08:5c:4d:31:4f:6f:5d:72:59:aa:42:16:12 ------BEGIN CERTIFICATE----- -MIIDbTCCAlWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBYMQswCQYDVQQGEwJKUDEr -MCkGA1UEChMiSmFwYW4gQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcywgSW5jLjEcMBoG -A1UEAxMTU2VjdXJlU2lnbiBSb290Q0ExMTAeFw0wOTA0MDgwNDU2NDdaFw0yOTA0 -MDgwNDU2NDdaMFgxCzAJBgNVBAYTAkpQMSswKQYDVQQKEyJKYXBhbiBDZXJ0aWZp -Y2F0aW9uIFNlcnZpY2VzLCBJbmMuMRwwGgYDVQQDExNTZWN1cmVTaWduIFJvb3RD -QTExMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA/XeqpRyQBTvLTJsz -i1oURaTnkBbR31fSIRCkF/3frNYfp+TbfPfs37gD2pRY/V1yfIw/XwFndBWW4wI8 -h9uuywGOwvNmxoVF9ALGOrVisq/6nL+k5tSAMJjzDbaTj6nU2DbysPyKyiyhFTOV -MdrAG/LuYpmGYz+/3ZMqg6h2uRMft85OQoWPIucuGvKVCbIFtUROd6EgvanyTgp9 -UK31BQ1FT0Zx/Sg+U/sE2C3XZR1KG/rPO7AxmjVuyIsG0wCR8pQIZUyxNAYAeoni -8McDWc/V1uinMrPmmECGxc0nEovMe863ETxiYAcjPitAbpSACW22s293bzUIUPsC -h8U+iQIDAQABo0IwQDAdBgNVHQ4EFgQUW/hNT7KlhtQ60vFjmqC+CfZXt94wDgYD -VR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB -AKChOBZmLqdWHyGcBvod7bkixTgm2E5P7KN/ed5GIaGHd48HCJqypMWvDzKYC3xm -KbabfSVSSUOrTC4rbnpwrxYO4wJs+0LmGJ1F2FXI6Dvd5+H0LgscNFxsWEr7jIhQ -X5Ucv+2rIrVls4W6ng+4reV6G4pQOh29Dbx7VFALuUKvVaAYga1lme++5Jy/xIWr -QbJUb9wlze144o4MjQlJ3WN7WmmWAiGovVJZ6X01y8hSyn+B/tlr0/cR7SXf+Of5 -pPpyl4RTDaXQMhhRdlkUbA/r7F+AjHVDg8OFmP9Mni0N5HeDk061lgeLKBObjBmN -QSdJQO7e5iNEOdyhIta6A/I= ------END CERTIFICATE----- - -# Issuer: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Subject: CN=Microsec e-Szigno Root CA 2009 O=Microsec Ltd. -# Label: "Microsec e-Szigno Root CA 2009" -# Serial: 14014712776195784473 -# MD5 Fingerprint: f8:49:f4:03:bc:44:2d:83:be:48:69:7d:29:64:fc:b1 -# SHA1 Fingerprint: 89:df:74:fe:5c:f4:0f:4a:80:f9:e3:37:7d:54:da:91:e1:01:31:8e -# SHA256 Fingerprint: 3c:5f:81:fe:a5:fa:b8:2c:64:bf:a2:ea:ec:af:cd:e8:e0:77:fc:86:20:a7:ca:e5:37:16:3d:f3:6e:db:f3:78 ------BEGIN CERTIFICATE----- -MIIECjCCAvKgAwIBAgIJAMJ+QwRORz8ZMA0GCSqGSIb3DQEBCwUAMIGCMQswCQYD -VQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3QxFjAUBgNVBAoMDU1pY3Jvc2VjIEx0 -ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3ppZ25vIFJvb3QgQ0EgMjAwOTEfMB0G -CSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5odTAeFw0wOTA2MTYxMTMwMThaFw0y -OTEyMzAxMTMwMThaMIGCMQswCQYDVQQGEwJIVTERMA8GA1UEBwwIQnVkYXBlc3Qx -FjAUBgNVBAoMDU1pY3Jvc2VjIEx0ZC4xJzAlBgNVBAMMHk1pY3Jvc2VjIGUtU3pp -Z25vIFJvb3QgQ0EgMjAwOTEfMB0GCSqGSIb3DQEJARYQaW5mb0BlLXN6aWduby5o -dTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAOn4j/NjrdqG2KfgQvvP -kd6mJviZpWNwrZuuyjNAfW2WbqEORO7hE52UQlKavXWFdCyoDh2Tthi3jCyoz/tc -cbna7P7ofo/kLx2yqHWH2Leh5TvPmUpG0IMZfcChEhyVbUr02MelTTMuhTlAdX4U -fIASmFDHQWe4oIBhVKZsTh/gnQ4H6cm6M+f+wFUoLAKApxn1ntxVUwOXewdI/5n7 -N4okxFnMUBBjjqqpGrCEGob5X7uxUG6k0QrM1XF+H6cbfPVTbiJfyyvm1HxdrtbC -xkzlBQHZ7Vf8wSN5/PrIJIOV87VqUQHQd9bpEqH5GoP7ghu5sJf0dgYzQ0mg/wu1 -+rUCAwEAAaOBgDB+MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBTLD8bfQkPMPcu1SCOhGnqmKrs0aDAfBgNVHSMEGDAWgBTLD8bfQkPM -Pcu1SCOhGnqmKrs0aDAbBgNVHREEFDASgRBpbmZvQGUtc3ppZ25vLmh1MA0GCSqG -SIb3DQEBCwUAA4IBAQDJ0Q5eLtXMs3w+y/w9/w0olZMEyL/azXm4Q5DwpL7v8u8h -mLzU1F0G9u5C7DBsoKqpyvGvivo/C3NqPuouQH4frlRheesuCDfXI/OMn74dseGk -ddug4lQUsbocKaQY9hK6ohQU4zE1yED/t+AFdlfBHFny+L/k7SViXITwfn4fs775 -tyERzAMBVnCnEJIeGzSBHq2cGsMEPO0CYdYeBvNfOofyK/FFh+U9rNHHV4S9a67c -2Pm2G2JwCz02yULyMtd6YebS2z3PyKnJm9zbWETXbzivf3jTo60adbocwTZ8jx5t -HMN1Rq41Bab2XD0h7lbwyYIiLXpUq3DDfSJlgnCW ------END CERTIFICATE----- - -# Issuer: 
CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 -# Label: "GlobalSign Root CA - R3" -# Serial: 4835703278459759426209954 -# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 -# SHA1 Fingerprint: d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad -# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b ------BEGIN CERTIFICATE----- -MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G -A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp -Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 -MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG -A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 -RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT -gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm -KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd -QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ -XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw -DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o -LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU -RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp -jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK -6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX -mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs -Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH -WD9f ------END CERTIFICATE----- - -# Issuer: CN=Izenpe.com O=IZENPE S.A. -# Subject: CN=Izenpe.com O=IZENPE S.A. 
-# Label: "Izenpe.com" -# Serial: 917563065490389241595536686991402621 -# MD5 Fingerprint: a6:b0:cd:85:80:da:5c:50:34:a3:39:90:2f:55:67:73 -# SHA1 Fingerprint: 2f:78:3d:25:52:18:a7:4a:65:39:71:b5:2c:a2:9c:45:15:6f:e9:19 -# SHA256 Fingerprint: 25:30:cc:8e:98:32:15:02:ba:d9:6f:9b:1f:ba:1b:09:9e:2d:29:9e:0f:45:48:bb:91:4f:36:3b:c0:d4:53:1f ------BEGIN CERTIFICATE----- -MIIF8TCCA9mgAwIBAgIQALC3WhZIX7/hy/WL1xnmfTANBgkqhkiG9w0BAQsFADA4 -MQswCQYDVQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6 -ZW5wZS5jb20wHhcNMDcxMjEzMTMwODI4WhcNMzcxMjEzMDgyNzI1WjA4MQswCQYD -VQQGEwJFUzEUMBIGA1UECgwLSVpFTlBFIFMuQS4xEzARBgNVBAMMCkl6ZW5wZS5j -b20wggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDJ03rKDx6sp4boFmVq -scIbRTJxldn+EFvMr+eleQGPicPK8lVx93e+d5TzcqQsRNiekpsUOqHnJJAKClaO -xdgmlOHZSOEtPtoKct2jmRXagaKH9HtuJneJWK3W6wyyQXpzbm3benhB6QiIEn6H -LmYRY2xU+zydcsC8Lv/Ct90NduM61/e0aL6i9eOBbsFGb12N4E3GVFWJGjMxCrFX -uaOKmMPsOzTFlUFpfnXCPCDFYbpRR6AgkJOhkEvzTnyFRVSa0QUmQbC1TR0zvsQD -yCV8wXDbO/QJLVQnSKwv4cSsPsjLkkxTOTcj7NMB+eAJRE1NZMDhDVqHIrytG6P+ -JrUV86f8hBnp7KGItERphIPzidF0BqnMC9bC3ieFUCbKF7jJeodWLBoBHmy+E60Q -rLUk9TiRodZL2vG70t5HtfG8gfZZa88ZU+mNFctKy6lvROUbQc/hhqfK0GqfvEyN -BjNaooXlkDWgYlwWTvDjovoDGrQscbNYLN57C9saD+veIR8GdwYDsMnvmfzAuU8L -hij+0rnq49qlw0dpEuDb8PYZi+17cNcC1u2HGCgsBCRMd+RIihrGO5rUD8r6ddIB -QFqNeb+Lz0vPqhbBleStTIo+F5HUsWLlguWABKQDfo2/2n+iD5dPDNMN+9fR5XJ+ -HMh3/1uaD7euBUbl8agW7EekFwIDAQABo4H2MIHzMIGwBgNVHREEgagwgaWBD2lu -Zm9AaXplbnBlLmNvbaSBkTCBjjFHMEUGA1UECgw+SVpFTlBFIFMuQS4gLSBDSUYg -QTAxMzM3MjYwLVJNZXJjLlZpdG9yaWEtR2FzdGVpeiBUMTA1NSBGNjIgUzgxQzBB -BgNVBAkMOkF2ZGEgZGVsIE1lZGl0ZXJyYW5lbyBFdG9yYmlkZWEgMTQgLSAwMTAx -MCBWaXRvcmlhLUdhc3RlaXowDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC -AQYwHQYDVR0OBBYEFB0cZQ6o8iV7tJHP5LGx5r1VdGwFMA0GCSqGSIb3DQEBCwUA -A4ICAQB4pgwWSp9MiDrAyw6lFn2fuUhfGI8NYjb2zRlrrKvV9pF9rnHzP7MOeIWb -laQnIUdCSnxIOvVFfLMMjlF4rJUT3sb9fbgakEyrkgPH7UIBzg/YsfqikuFgba56 -awmqxinuaElnMIAkejEWOVt+8Rwu3WwJrfIxwYJOubv5vr8qhT/AQKM6WfxZSzwo -JNu0FXWuDYi6LnPAvViH5ULy617uHjAimcs30cQhbIHsvm0m5hzkQiCeR7Csg1lw -LDXWrzY0tM07+DKo7+N4ifuNRSzanLh+QBxh5z6ikixL8s36mLYp//Pye6kfLqCT -VyvehQP5aTfLnnhqBbTFMXiJ7HqnheG5ezzevh55hM6fcA5ZwjUukCox2eRFekGk -LhObNA5me0mrZJfQRsN5nXJQY6aYWwa9SG3YOYNw6DXwBdGqvOPbyALqfP2C2sJb -UjWumDqtujWTI6cfSN01RpiyEGjkpTHCClguGYEQyVB1/OpaFs4R1+7vUIgtYf8/ -QnMFlEPVjjxOAToZpR9GTnfQXeWBIiGH/pR9hNiTrdZoQ0iy2+tzJOeRf1SktoA+ -naM8THLCV8Sg1Mw4J87VBp6iSNnpn86CcDaTmjvfliHjWbcM2pE38P1ZWrOZyGls -QyYBNWNgVYkDOnXYukrZVP/u3oDYLdE41V4tC5h9Pmzb/CaIxw== ------END CERTIFICATE----- - -# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. -# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
-# Label: "Go Daddy Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 -# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b -# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da ------BEGIN CERTIFICATE----- -MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT -EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp -ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz -NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH -EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE -AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw -DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD -E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH -/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy -DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh -GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR -tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA -AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX -WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu -9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr -gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo -2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO -LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI -4uJEvlz36hz1 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 -# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e -# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 ------BEGIN CERTIFICATE----- -MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs -ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw -MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 -b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj -aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp -Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC -ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg -nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 -HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N -Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN -dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 -HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO -BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G -CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU -sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 -4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg -8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K -pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 -mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 ------END CERTIFICATE----- - -# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. -# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
-# Label: "Starfield Services Root Certificate Authority - G2" -# Serial: 0 -# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 -# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f -# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 ------BEGIN CERTIFICATE----- -MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx -EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT -HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs -ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 -MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD -VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy -ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy -dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p -OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 -8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K -Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe -hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk -6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw -DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q -AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI -bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB -ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z -qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd -iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn -0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN -sSi6 ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Commercial O=AffirmTrust -# Subject: CN=AffirmTrust Commercial O=AffirmTrust -# Label: "AffirmTrust Commercial" -# Serial: 8608355977964138876 -# MD5 Fingerprint: 82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 -# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 -# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP -Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr -ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL -MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 -yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr -VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ -nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG -XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj -vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt -Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g -N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC -nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Networking O=AffirmTrust -# Subject: CN=AffirmTrust Networking 
O=AffirmTrust -# Label: "AffirmTrust Networking" -# Serial: 8957382827206547757 -# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f -# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f -# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b ------BEGIN CERTIFICATE----- -MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz -dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL -MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp -cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y -YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua -kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL -QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp -6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG -yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i -QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ -KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO -tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu -QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ -Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u -olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 -x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium O=AffirmTrust -# Subject: CN=AffirmTrust Premium O=AffirmTrust -# Label: "AffirmTrust Premium" -# Serial: 7893706540734352110 -# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 -# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 -# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE -BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz -dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG -A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U -cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf -qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ -JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ -+jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS -s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 -HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 -70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG -V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S -qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S -5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia -C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX -OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE -FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 -KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg -Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B -8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ -MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc -0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ 
-u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF -u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH -YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 -GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO -RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e -KeC2uAloGRwYQw== ------END CERTIFICATE----- - -# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust -# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust -# Label: "AffirmTrust Premium ECC" -# Serial: 8401224907861490260 -# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d -# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb -# SHA256 Fingerprint: bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 ------BEGIN CERTIFICATE----- -MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC -VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ -cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ -BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt -VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D -0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 -ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G -A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs -aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I -flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Network CA" -# Serial: 279744 -# MD5 Fingerprint: d5:e9:81:40:c5:18:69:fc:46:2c:89:75:62:0f:aa:78 -# SHA1 Fingerprint: 07:e0:32:e0:20:b7:2c:3f:19:2f:06:28:a2:59:3a:19:a7:0f:06:9e -# SHA256 Fingerprint: 5c:58:46:8d:55:f5:8e:49:7e:74:39:82:d2:b5:00:10:b6:d1:65:37:4a:cf:83:a7:d4:a3:2d:b7:68:c4:40:8e ------BEGIN CERTIFICATE----- -MIIDuzCCAqOgAwIBAgIDBETAMA0GCSqGSIb3DQEBBQUAMH4xCzAJBgNVBAYTAlBM -MSIwIAYDVQQKExlVbml6ZXRvIFRlY2hub2xvZ2llcyBTLkEuMScwJQYDVQQLEx5D -ZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxIjAgBgNVBAMTGUNlcnR1bSBU -cnVzdGVkIE5ldHdvcmsgQ0EwHhcNMDgxMDIyMTIwNzM3WhcNMjkxMjMxMTIwNzM3 -WjB+MQswCQYDVQQGEwJQTDEiMCAGA1UEChMZVW5pemV0byBUZWNobm9sb2dpZXMg -Uy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MSIw -IAYDVQQDExlDZXJ0dW0gVHJ1c3RlZCBOZXR3b3JrIENBMIIBIjANBgkqhkiG9w0B -AQEFAAOCAQ8AMIIBCgKCAQEA4/t9o3K6wvDJFIf1awFO4W5AB7ptJ11/91sts1rH -UV+rpDKmYYe2bg+G0jACl/jXaVehGDldamR5xgFZrDwxSjh80gTSSyjoIF87B6LM -TXPb865Px1bVWqeWifrzq2jUI4ZZJ88JJ7ysbnKDHDBy3+Ci6dLhdHUZvSqeexVU -BBvXQzmtVSjF4hq79MDkrjhJM8x2hZ85RdKknvISjFH4fOQtf/WsX+sWn7Et0brM -kUJ3TCXJkDhv2/DM+44el1k+1WBO5gUo7Ul5E0u6SNsv+XLTOcr+H9g0cvW0QM8x -AcPs3hEtF10fuFDRXhmnad4HMyjKUJX5p1TLVIZQRan5SQIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBQIds3LB/8k9sXN7buQvOKEN0Z19zAOBgNV -HQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQEFBQADggEBAKaorSLOAT2mo/9i0Eidi15y -sHhE49wcrwn9I0j6vSrEuVUEtRCjjSfeC4Jj0O7eDDd5QVsisrCaQVymcODU0HfL -I9MA4GxWL+FpDQ3Zqr8hgVDZBqWo/5U30Kr+4rP1mS1FhIrlQgnXdAIv94nYmem8 -J9RHjboNRhx3zxSkHLmkMcScKHQDNP8zGSal6Q10tz6XxnboJ5ajZt3hrvJBW8qY -VoNzcOSGGtIxQbovvi0TWnZvTuhOgQ4/WwMioBK+ZlgRSssDxLQqKi2WF+A5VLxI -03YnnZotBqbJ7DnSq9ufmgsnAjUpsUCV5/nonFWIGUbWtzT1fs45mtk48VH3Tyw= ------END CERTIFICATE----- - -# Issuer: CN=TWCA Root Certification Authority O=TAIWAN-CA 
OU=Root CA -# Subject: CN=TWCA Root Certification Authority O=TAIWAN-CA OU=Root CA -# Label: "TWCA Root Certification Authority" -# Serial: 1 -# MD5 Fingerprint: aa:08:8f:f6:f9:7b:b7:f2:b1:a7:1e:9b:ea:ea:bd:79 -# SHA1 Fingerprint: cf:9e:87:6d:d3:eb:fc:42:26:97:a3:b5:a3:7a:a0:76:a9:06:23:48 -# SHA256 Fingerprint: bf:d8:8f:e1:10:1c:41:ae:3e:80:1b:f8:be:56:35:0e:e9:ba:d1:a6:b9:bd:51:5e:dc:5c:6d:5b:87:11:ac:44 ------BEGIN CERTIFICATE----- -MIIDezCCAmOgAwIBAgIBATANBgkqhkiG9w0BAQUFADBfMQswCQYDVQQGEwJUVzES -MBAGA1UECgwJVEFJV0FOLUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFU -V0NBIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwODI4MDcyNDMz -WhcNMzAxMjMxMTU1OTU5WjBfMQswCQYDVQQGEwJUVzESMBAGA1UECgwJVEFJV0FO -LUNBMRAwDgYDVQQLDAdSb290IENBMSowKAYDVQQDDCFUV0NBIFJvb3QgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB -AQCwfnK4pAOU5qfeCTiRShFAh6d8WWQUe7UREN3+v9XAu1bihSX0NXIP+FPQQeFE -AcK0HMMxQhZHhTMidrIKbw/lJVBPhYa+v5guEGcevhEFhgWQxFnQfHgQsIBct+HH -K3XLfJ+utdGdIzdjp9xCoi2SBBtQwXu4PhvJVgSLL1KbralW6cH/ralYhzC2gfeX -RfwZVzsrb+RH9JlF/h3x+JejiB03HFyP4HYlmlD4oFT/RJB2I9IyxsOrBr/8+7/z -rX2SYgJbKdM1o5OaQ2RgXbL6Mv87BK9NQGr5x+PvI/1ry+UPizgN7gr8/g+YnzAx -3WxSZfmLgb4i4RxYA7qRG4kHAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqOFsmjd6LWvJPelSDGRjjCDWmujANBgkq -hkiG9w0BAQUFAAOCAQEAPNV3PdrfibqHDAhUaiBQkr6wQT25JmSDCi/oQMCXKCeC -MErJk/9q56YAf4lCmtYR5VPOL8zy2gXE/uJQxDqGfczafhAJO5I1KlOy/usrBdls -XebQ79NqZp4VKIV66IIArB6nCWlWQtNoURi+VJq/REG6Sb4gumlc7rh3zc5sH62D -lhh9DrUUOYTxKOkto557HnpyWoOzeW/vtPzQCqVYT0bf+215WfKEIlKuD8z7fDvn -aspHYcN6+NOSBB+4IIThNlQWx0DeO4pz3N/GCUzf7Nr/1FNCocnyYh0igzyXxfkZ -YiesZSLX0zzG5Y6yU8xJzrww/nsOM5D77dIUkR8Hrw== ------END CERTIFICATE----- - -# Issuer: O=SECOM Trust Systems CO.,LTD. OU=Security Communication RootCA2 -# Subject: O=SECOM Trust Systems CO.,LTD. 
OU=Security Communication RootCA2 -# Label: "Security Communication RootCA2" -# Serial: 0 -# MD5 Fingerprint: 6c:39:7d:a4:0e:55:59:b2:3f:d6:41:b1:12:50:de:43 -# SHA1 Fingerprint: 5f:3b:8c:f2:f8:10:b3:7d:78:b4:ce:ec:19:19:c3:73:34:b9:c7:74 -# SHA256 Fingerprint: 51:3b:2c:ec:b8:10:d4:cd:e5:dd:85:39:1a:df:c6:c2:dd:60:d8:7b:b7:36:d2:b5:21:48:4a:a4:7a:0e:be:f6 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIBADANBgkqhkiG9w0BAQsFADBdMQswCQYDVQQGEwJKUDEl -MCMGA1UEChMcU0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UECxMe -U2VjdXJpdHkgQ29tbXVuaWNhdGlvbiBSb290Q0EyMB4XDTA5MDUyOTA1MDAzOVoX -DTI5MDUyOTA1MDAzOVowXTELMAkGA1UEBhMCSlAxJTAjBgNVBAoTHFNFQ09NIFRy -dXN0IFN5c3RlbXMgQ08uLExURC4xJzAlBgNVBAsTHlNlY3VyaXR5IENvbW11bmlj -YXRpb24gUm9vdENBMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANAV -OVKxUrO6xVmCxF1SrjpDZYBLx/KWvNs2l9amZIyoXvDjChz335c9S672XewhtUGr -zbl+dp+++T42NKA7wfYxEUV0kz1XgMX5iZnK5atq1LXaQZAQwdbWQonCv/Q4EpVM -VAX3NuRFg3sUZdbcDE3R3n4MqzvEFb46VqZab3ZpUql6ucjrappdUtAtCms1FgkQ -hNBqyjoGADdH5H5XTz+L62e4iKrFvlNVspHEfbmwhRkGeC7bYRr6hfVKkaHnFtWO -ojnflLhwHyg/i/xAXmODPIMqGplrz95Zajv8bxbXH/1KEOtOghY6rCcMU/Gt1SSw -awNQwS08Ft1ENCcadfsCAwEAAaNCMEAwHQYDVR0OBBYEFAqFqXdlBZh8QIH4D5cs -OPEK7DzPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3 -DQEBCwUAA4IBAQBMOqNErLlFsceTfsgLCkLfZOoc7llsCLqJX2rKSpWeeo8HxdpF -coJxDjrSzG+ntKEju/Ykn8sX/oymzsLS28yN/HH8AynBbF0zX2S2ZTuJbxh2ePXc -okgfGT+Ok+vx+hfuzU7jBBJV1uXk3fs+BXziHV7Gp7yXT2g69ekuCkO2r1dcYmh8 -t/2jioSgrGK+KwmHNPBqAbubKVY8/gA3zyNs8U6qtnRGEmyR7jTV7JqR50S+kDFy -1UkC9gLl9B/rfNmWVan/7Ir5mUf/NVoCqgTLiluHcSmRvaS0eg29mvVXIwAHIRc/ -SjnRBUkLp7Y3gaVdjKozXoEofKd9J+sAro03 ------END CERTIFICATE----- - -# Issuer: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Subject: CN=Actalis Authentication Root CA O=Actalis S.p.A./03358520967 -# Label: "Actalis Authentication Root CA" -# Serial: 6271844772424770508 -# MD5 Fingerprint: 69:c1:0d:4f:07:a3:1b:c3:fe:56:3d:04:bc:11:f6:a6 -# SHA1 Fingerprint: f3:73:b3:87:06:5a:28:84:8a:f2:f3:4a:ce:19:2b:dd:c7:8e:9c:ac -# SHA256 Fingerprint: 55:92:60:84:ec:96:3a:64:b9:6e:2a:be:01:ce:0b:a8:6a:64:fb:fe:bc:c7:aa:b5:af:c1:55:b3:7f:d7:60:66 ------BEGIN CERTIFICATE----- -MIIFuzCCA6OgAwIBAgIIVwoRl0LE48wwDQYJKoZIhvcNAQELBQAwazELMAkGA1UE -BhMCSVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8w -MzM1ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290 -IENBMB4XDTExMDkyMjExMjIwMloXDTMwMDkyMjExMjIwMlowazELMAkGA1UEBhMC -SVQxDjAMBgNVBAcMBU1pbGFuMSMwIQYDVQQKDBpBY3RhbGlzIFMucC5BLi8wMzM1 -ODUyMDk2NzEnMCUGA1UEAwweQWN0YWxpcyBBdXRoZW50aWNhdGlvbiBSb290IENB -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAp8bEpSmkLO/lGMWwUKNv -UTufClrJwkg4CsIcoBh/kbWHuUA/3R1oHwiD1S0eiKD4j1aPbZkCkpAW1V8IbInX -4ay8IMKx4INRimlNAJZaby/ARH6jDuSRzVju3PvHHkVH3Se5CAGfpiEd9UEtL0z9 -KK3giq0itFZljoZUj5NDKd45RnijMCO6zfB9E1fAXdKDa0hMxKufgFpbOr3JpyI/ -gCczWw63igxdBzcIy2zSekciRDXFzMwujt0q7bd9Zg1fYVEiVRvjRuPjPdA1Yprb -rxTIW6HMiRvhMCb8oJsfgadHHwTrozmSBp+Z07/T6k9QnBn+locePGX2oxgkg4YQ -51Q+qDp2JE+BIcXjDwL4k5RHILv+1A7TaLndxHqEguNTVHnd25zS8gebLra8Pu2F -be8lEfKXGkJh90qX6IuxEAf6ZYGyojnP9zz/GPvG8VqLWeICrHuS0E4UT1lF9gxe -KF+w6D9Fz8+vm2/7hNN3WpVvrJSEnu68wEqPSpP4RCHiMUVhUE4Q2OM1fEwZtN4F -v6MGn8i1zeQf1xcGDXqVdFUNaBr8EBtiZJ1t4JWgw5QHVw0U5r0F+7if5t+L4sbn -fpb2U8WANFAoWPASUHEXMLrmeGO89LKtmyuy/uE5jF66CyCU3nuDuP/jVo23Eek7 -jPKxwV2dpAtMK9myGPW1n0sCAwEAAaNjMGEwHQYDVR0OBBYEFFLYiDrIn3hm7Ynz -ezhwlMkCAjbQMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUUtiIOsifeGbt -ifN7OHCUyQICNtAwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3DQEBCwUAA4ICAQAL -e3KHwGCmSUyIWOYdiPcUZEim2FgKDk8TNd81HdTtBjHIgT5q1d07GjLukD0R0i70 
-jsNjLiNmsGe+b7bAEzlgqqI0JZN1Ut6nna0Oh4lScWoWPBkdg/iaKWW+9D+a2fDz -WochcYBNy+A4mz+7+uAwTc+G02UQGRjRlwKxK3JCaKygvU5a2hi/a5iB0P2avl4V -SM0RFbnAKVy06Ij3Pjaut2L9HmLecHgQHEhb2rykOLpn7VU+Xlff1ANATIGk0k9j -pwlCCRT8AKnCgHNPLsBA2RF7SOp6AsDT6ygBJlh0wcBzIm2Tlf05fbsq4/aC4yyX -X04fkZT6/iyj2HYauE2yOE+b+h1IYHkm4vP9qdCa6HCPSXrW5b0KDtst842/6+Ok -fcvHlXHo2qN8xcL4dJIEG4aspCJTQLas/kx2z/uUMsA1n3Y/buWQbqCmJqK4LL7R -K4X9p2jIugErsWx0Hbhzlefut8cl8ABMALJ+tguLHPPAUJ4lueAI3jZm/zel0btU -ZCzJJ7VLkn5l/9Mt4blOvH+kQSGQQXemOR/qnuOf0GZvBeyqdn6/axag67XH/JJU -LysRJyU3eExRarDzzFhdFPFqSBX/wge2sY0PjlxQRrM9vwGYT7JZVEc+NHt4bVaT -LnPqZih4zR0Uv6CPLy64Lo7yFIrM6bV8+2ydDKXhlg== ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 2 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 2 Root CA" -# Serial: 2 -# MD5 Fingerprint: 46:a7:d2:fe:45:fb:64:5a:a8:59:90:9b:78:44:9b:29 -# SHA1 Fingerprint: 49:0a:75:74:de:87:0a:47:fe:58:ee:f6:c7:6b:eb:c6:0b:12:40:99 -# SHA256 Fingerprint: 9a:11:40:25:19:7c:5b:b9:5d:94:e6:3d:55:cd:43:79:08:47:b6:46:b2:3c:df:11:ad:a4:a0:0e:ff:15:fb:48 ------BEGIN CERTIFICATE----- -MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMiBSb290IENBMB4XDTEwMTAyNjA4MzgwM1oXDTQwMTAyNjA4MzgwM1ow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDIgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBANfHXvfBB9R3+0Mh9PT1aeTuMgHbo4Yf5FkNuud1g1Lr -6hxhFUi7HQfKjK6w3Jad6sNgkoaCKHOcVgb/S2TwDCo3SbXlzwx87vFKu3MwZfPV -L4O2fuPn9Z6rYPnT8Z2SdIrkHJasW4DptfQxh6NR/Md+oW+OU3fUl8FVM5I+GC91 -1K2GScuVr1QGbNgGE41b/+EmGVnAJLqBcXmQRFBoJJRfuLMR8SlBYaNByyM21cHx -MlAQTn/0hpPshNOOvEu/XAFOBz3cFIqUCqTqc/sLUegTBxj6DvEr0VQVfTzh97QZ -QmdiXnfgolXsttlpF9U6r0TtSsWe5HonfOV116rLJeffawrbD02TTqigzXsu8lkB -arcNuAeBfos4GzjmCleZPe4h6KP1DBbdi+w0jpwqHAAVF41og9JwnxgIzRFo1clr -Us3ERo/ctfPYV3Me6ZQ5BL/T3jjetFPsaRyifsSP5BtwrfKi+fv3FmRmaZ9JUaLi -FRhnBkp/1Wy1TbMz4GHrXb7pmA8y1x1LPC5aAVKRCfLf6o3YBkBjqhHk/sM3nhRS -P/TizPJhk9H9Z2vXUq6/aKtAQ6BXNVN48FP4YUIHZMbXb5tMOA1jrGKvNouicwoN -9SG9dKpN6nIDSdvHXx1iY8f93ZHsM+71bbRuMGjeyNYmsHVee7QHIJihdjK4TWxP -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMmAd+BikoL1Rpzz -uvdMw964o605MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAU18h -9bqwOlI5LJKwbADJ784g7wbylp7ppHR/ehb8t/W2+xUbP6umwHJdELFx7rxP462s -A20ucS6vxOOto70MEae0/0qyexAQH6dXQbLArvQsWdZHEIjzIVEpMMpghq9Gqx3t -OluwlN5E40EIosHsHdb9T7bWR9AUC8rmyrV7d35BH16Dx7aMOZawP5aBQW9gkOLo -+fsicdl9sz1Gv7SEr5AcD48Saq/v7h56rgJKihcrdv6sVIkkLE8/trKnToyokZf7 -KcZ7XC25y2a2t6hbElGFtQl+Ynhw/qlqYLYdDnkM/crqJIByw5c/8nerQyIKx+u2 -DISCLIBrQYoIwOula9+ZEsuK1V6ADJHgJgg2SMX6OBE1/yWDLfJ6v9r9jv6ly0Us -H8SIU653DtmadsWOLB2jutXsMq7Aqqz30XpN69QH4kj3Io6wpJ9qzo6ysmD0oyLQ -I+uUWnpp3Q+/QFesa1lQ2aOZ4W7+jQF5JyMV3pKdewlNWudLSDBaGOYKbeaP4NK7 -5t98biGCwWg5TbSYWGZizEqQXsP6JwSxeRV0mcy+rSDeJmAc61ZRpqPq5KM/p/9h -3PFaTWwyI0PurKju7koSCTxdccK+efrCh2gdC/1cacwG0Jp9VJkqyTkaGa9LKkPz -Y11aWOIv4x3kqdbQCtCev9eBCfHJxyYNrJgWVqA= ------END CERTIFICATE----- - -# Issuer: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Subject: CN=Buypass Class 3 Root CA O=Buypass AS-983163327 -# Label: "Buypass Class 3 Root CA" -# Serial: 2 -# MD5 Fingerprint: 3d:3b:18:9e:2c:64:5a:e8:d5:88:ce:0e:f9:37:c2:ec -# SHA1 Fingerprint: da:fa:f7:fa:66:84:ec:06:8f:14:50:bd:c7:c2:81:a5:bc:a9:64:57 -# SHA256 Fingerprint: ed:f7:eb:bc:a2:7a:2a:38:4d:38:7b:7d:40:10:c6:66:e2:ed:b4:84:3e:4c:29:b4:ae:1d:5b:93:32:e6:b2:4d ------BEGIN CERTIFICATE----- 
-MIIFWTCCA0GgAwIBAgIBAjANBgkqhkiG9w0BAQsFADBOMQswCQYDVQQGEwJOTzEd -MBsGA1UECgwUQnV5cGFzcyBBUy05ODMxNjMzMjcxIDAeBgNVBAMMF0J1eXBhc3Mg -Q2xhc3MgMyBSb290IENBMB4XDTEwMTAyNjA4Mjg1OFoXDTQwMTAyNjA4Mjg1OFow -TjELMAkGA1UEBhMCTk8xHTAbBgNVBAoMFEJ1eXBhc3MgQVMtOTgzMTYzMzI3MSAw -HgYDVQQDDBdCdXlwYXNzIENsYXNzIDMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEB -BQADggIPADCCAgoCggIBAKXaCpUWUOOV8l6ddjEGMnqb8RB2uACatVI2zSRHsJ8Y -ZLya9vrVediQYkwiL944PdbgqOkcLNt4EemOaFEVcsfzM4fkoF0LXOBXByow9c3E -N3coTRiR5r/VUv1xLXA+58bEiuPwKAv0dpihi4dVsjoT/Lc+JzeOIuOoTyrvYLs9 -tznDDgFHmV0ST9tD+leh7fmdvhFHJlsTmKtdFoqwNxxXnUX/iJY2v7vKB3tvh2PX -0DJq1l1sDPGzbjniazEuOQAnFN44wOwZZoYS6J1yFhNkUsepNxz9gjDthBgd9K5c -/3ATAOux9TN6S9ZV+AWNS2mw9bMoNlwUxFFzTWsL8TQH2xc519woe2v1n/MuwU8X -KhDzzMro6/1rqy6any2CbgTUUgGTLT2G/H783+9CHaZr77kgxve9oKeV/afmiSTY -zIw0bOIjL9kSGiG5VZFvC5F5GQytQIgLcOJ60g7YaEi7ghM5EFjp2CoHxhLbWNvS -O1UQRwUVZ2J+GGOmRj8JDlQyXr8NYnon74Do29lLBlo3WiXQCBJ31G8JUJc9yB3D -34xFMFbG02SrZvPAXpacw8Tvw3xrizp5f7NJzz3iiZ+gMEuFuZyUJHmPfWupRWgP -K9Dx2hzLabjKSWJtyNBjYt1gD1iqj6G8BaVmos8bdrKEZLFMOVLAMLrwjEsCsLa3 -AgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFEe4zf/lb+74suwv -Tg75JbCOPGvDMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAACAj -QTUEkMJAYmDv4jVM1z+s4jSQuKFvdvoWFqRINyzpkMLyPPgKn9iB5btb2iUspKdV -cSQy9sgL8rxq+JOssgfCX5/bzMiKqr5qb+FJEMwx14C7u8jYog5kV+qi9cKpMRXS -IGrs/CIBKM+GuIAeqcwRpTzyFrNHnfzSgCHEy9BHcEGhyoMZCCxt8l13nIoUE9Q2 -HJLw5QY33KbmkJs4j1xrG0aGQ0JfPgEHU1RdZX33inOhmlRaHylDFCfChQ+1iHsa -O5S3HWCntZznKWlXWpuTekMwGwPXYshApqr8ZORK15FTAaggiG6cX0S5y2CBNOxv -033aSF/rtJC8LakcC6wc1aJoIIAE1vyxjy+7SjENSoYc6+I2KSb12tjE8nVhz36u -dmNKekBlk4f4HoCMhuWG1o8O/FMsYOgWYRqiPkN7zTlgVGr18okmAWiDSKIz6MkE -kbIRNBE+6tBDGR8Dk5AM/1E9V/RBbuHLoL7ryWPNbczk+DaqaJ3tvV2XcEQNtg41 -3OEMXbugUZTLfhbrES+jkkXITHHZvMmZUldGL1DPvTVp9D0VzgalLA8+9oG6lLvD -u79leNKGef9JOxqDDPDeeOzI8k1MGt6CKfjBWtrt7uYnXuhF0J0cUahoq0Tj0Itq -4/g7u9xN12TyUb7mqqta6THuBrxzvxNiCp/HuZc= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 3 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 3" -# Serial: 1 -# MD5 Fingerprint: ca:fb:40:a8:4e:39:92:8a:1d:fe:8e:2f:c4:27:ea:ef -# SHA1 Fingerprint: 55:a6:72:3e:cb:f2:ec:cd:c3:23:74:70:19:9d:2a:be:11:e3:81:d1 -# SHA256 Fingerprint: fd:73:da:d3:1c:64:4f:f1:b4:3b:ef:0c:cd:da:96:71:0b:9c:d9:87:5e:ca:7e:31:70:7a:f3:e9:6d:52:2b:bd ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDMwHhcNMDgxMDAxMTAyOTU2WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDMwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC9dZPwYiJvJK7genasfb3ZJNW4t/zN -8ELg63iIVl6bmlQdTQyK9tPPcPRStdiTBONGhnFBSivwKixVA9ZIw+A5OO3yXDw/ -RLyTPWGrTs0NvvAgJ1gORH8EGoel15YUNpDQSXuhdfsaa3Ox+M6pCSzyU9XDFES4 -hqX2iys52qMzVNn6chr3IhUciJFrf2blw2qAsCTz34ZFiP0Zf3WHHx+xGwpzJFu5 -ZeAsVMhg02YXP+HMVDNzkQI6pn97djmiH5a2OK61yJN0HZ65tOVgnS9W0eDrXltM -EnAMbEQgqxHY9Bn20pxSN+f6tsIxO0rUFJmtxxr1XV/6B7h8DR/Wgx6zAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS1 -A/d2O2GCahKqGFPrAyGUv/7OyjANBgkqhkiG9w0BAQsFAAOCAQEAVj3vlNW92nOy -WL6ukK2YJ5f+AbGwUgC4TeQbIXQbfsDuXmkqJa9c1h3a0nnJ85cp4IaH3gRZD/FZ 
-1GSFS5mvJQQeyUapl96Cshtwn5z2r3Ex3XsFpSzTucpH9sry9uetuUg/vBa3wW30 -6gmv7PO15wWeph6KU1HWk4HMdJP2udqmJQV0eVp+QD6CSyYRMG7hP0HHRwA11fXT -91Q+gT3aSWqas+8QPebrb9HIIkfLzM8BMZLZGOMivgkeGj5asuRrDFR6fUNOuIml -e9eiPZaGzPImNC1qkp2aGtAw4l1OBLBfiyB+d8E9lYLRRpo7PHi4b6HQDWSieB4p -TpPDpFQUWw== ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 2009" -# Serial: 623603 -# MD5 Fingerprint: cd:e0:25:69:8d:47:ac:9c:89:35:90:f7:fd:51:3d:2f -# SHA1 Fingerprint: 58:e8:ab:b0:36:15:33:fb:80:f7:9b:1b:6d:29:d3:ff:8d:5f:00:f0 -# SHA256 Fingerprint: 49:e7:a4:42:ac:f0:ea:62:87:05:00:54:b5:25:64:b6:50:e4:f4:9e:42:e3:48:d6:aa:38:e0:39:e9:57:b1:c1 ------BEGIN CERTIFICATE----- -MIIEMzCCAxugAwIBAgIDCYPzMA0GCSqGSIb3DQEBCwUAME0xCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMMHkQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgMjAwOTAeFw0wOTExMDUwODM1NThaFw0yOTExMDUwODM1NTha -ME0xCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxJzAlBgNVBAMM -HkQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgMjAwOTCCASIwDQYJKoZIhvcNAQEB -BQADggEPADCCAQoCggEBANOySs96R+91myP6Oi/WUEWJNTrGa9v+2wBoqOADER03 -UAifTUpolDWzU9GUY6cgVq/eUXjsKj3zSEhQPgrfRlWLJ23DEE0NkVJD2IfgXU42 -tSHKXzlABF9bfsyjxiupQB7ZNoTWSPOSHjRGICTBpFGOShrvUD9pXRl/RcPHAY9R -ySPocq60vFYJfxLLHLGvKZAKyVXMD9O0Gu1HNVpK7ZxzBCHQqr0ME7UAyiZsxGsM -lFqVlNpQmvH/pStmMaTJOKDfHR+4CS7zp+hnUquVH+BGPtikw8paxTGA6Eian5Rp -/hnd2HN8gcqW3o7tszIFZYQ05ub9VxC1X3a/L7AQDcUCAwEAAaOCARowggEWMA8G -A1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFP3aFMSfMN4hvR5COfyrYyNJ4PGEMA4G -A1UdDwEB/wQEAwIBBjCB0wYDVR0fBIHLMIHIMIGAoH6gfIZ6bGRhcDovL2RpcmVj -dG9yeS5kLXRydXN0Lm5ldC9DTj1ELVRSVVNUJTIwUm9vdCUyMENsYXNzJTIwMyUy -MENBJTIwMiUyMDIwMDksTz1ELVRydXN0JTIwR21iSCxDPURFP2NlcnRpZmljYXRl -cmV2b2NhdGlvbmxpc3QwQ6BBoD+GPWh0dHA6Ly93d3cuZC10cnVzdC5uZXQvY3Js -L2QtdHJ1c3Rfcm9vdF9jbGFzc18zX2NhXzJfMjAwOS5jcmwwDQYJKoZIhvcNAQEL -BQADggEBAH+X2zDI36ScfSF6gHDOFBJpiBSVYEQBrLLpME+bUMJm2H6NMLVwMeni -acfzcNsgFYbQDfC+rAF1hM5+n02/t2A7nPPKHeJeaNijnZflQGDSNiH+0LS4F9p0 -o3/U37CYAqxva2ssJSRyoWXuJVrl5jLn8t+rSfrzkGkj2wTZ51xY/GXUl77M/C4K -zCUqNQT4YJEVdT1B/yMfGchs64JTBKbkTCJNjYy6zltz7GRUUG3RnFX7acM2w4y8 -PIWmawomDeCTmGCufsYkl4phX5GOZpIJhzbNi5stPvZR1FDUWSi9g/LMKHtThm3Y -Johw1+qRzT65ysCQblrGXnRl11z+o+I= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Subject: CN=D-TRUST Root Class 3 CA 2 EV 2009 O=D-Trust GmbH -# Label: "D-TRUST Root Class 3 CA 2 EV 2009" -# Serial: 623604 -# MD5 Fingerprint: aa:c6:43:2c:5e:2d:cd:c4:34:c0:50:4f:11:02:4f:b6 -# SHA1 Fingerprint: 96:c9:1b:0b:95:b4:10:98:42:fa:d0:d8:22:79:fe:60:fa:b9:16:83 -# SHA256 Fingerprint: ee:c5:49:6b:98:8c:e9:86:25:b9:34:09:2e:ec:29:08:be:d0:b0:f3:16:c2:d4:73:0c:84:ea:f1:f3:d3:48:81 ------BEGIN CERTIFICATE----- -MIIEQzCCAyugAwIBAgIDCYP0MA0GCSqGSIb3DQEBCwUAMFAxCzAJBgNVBAYTAkRF -MRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNVBAMMIUQtVFJVU1QgUm9vdCBD -bGFzcyAzIENBIDIgRVYgMjAwOTAeFw0wOTExMDUwODUwNDZaFw0yOTExMDUwODUw -NDZaMFAxCzAJBgNVBAYTAkRFMRUwEwYDVQQKDAxELVRydXN0IEdtYkgxKjAoBgNV -BAMMIUQtVFJVU1QgUm9vdCBDbGFzcyAzIENBIDIgRVYgMjAwOTCCASIwDQYJKoZI -hvcNAQEBBQADggEPADCCAQoCggEBAJnxhDRwui+3MKCOvXwEz75ivJn9gpfSegpn -ljgJ9hBOlSJzmY3aFS3nBfwZcyK3jpgAvDw9rKFs+9Z5JUut8Mxk2og+KbgPCdM0 -3TP1YtHhzRnp7hhPTFiu4h7WDFsVWtg6uMQYZB7jM7K1iXdODL/ZlGsTl28So/6Z -qQTMFexgaDbtCHu39b+T7WYxg4zGcTSHThfqr4uRjRxWQa4iN1438h3Z0S0NL2lR -p75mpoo6Kr3HGrHhFPC+Oh25z1uxav60sUYgovseO3Dvk5h9jHOW8sXvhXCtKSb8 -HgQ+HKDYD8tSg2J87otTlZCpV6LqYQXY+U3EJ/pure3511H3a6UCAwEAAaOCASQw 
-ggEgMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFNOUikxiEyoZLsyvcop9Ntea -HNxnMA4GA1UdDwEB/wQEAwIBBjCB3QYDVR0fBIHVMIHSMIGHoIGEoIGBhn9sZGFw -Oi8vZGlyZWN0b3J5LmQtdHJ1c3QubmV0L0NOPUQtVFJVU1QlMjBSb290JTIwQ2xh -c3MlMjAzJTIwQ0ElMjAyJTIwRVYlMjAyMDA5LE89RC1UcnVzdCUyMEdtYkgsQz1E -RT9jZXJ0aWZpY2F0ZXJldm9jYXRpb25saXN0MEagRKBChkBodHRwOi8vd3d3LmQt -dHJ1c3QubmV0L2NybC9kLXRydXN0X3Jvb3RfY2xhc3NfM19jYV8yX2V2XzIwMDku -Y3JsMA0GCSqGSIb3DQEBCwUAA4IBAQA07XtaPKSUiO8aEXUHL7P+PPoeUSbrh/Yp -3uDx1MYkCenBz1UbtDDZzhr+BlGmFaQt77JLvyAoJUnRpjZ3NOhk31KxEcdzes05 -nsKtjHEh8lprr988TlWvsoRlFIm5d8sqMb7Po23Pb0iUMkZv53GMoKaEGTcH8gNF -CSuGdXzfX2lXANtu2KZyIktQ1HWYVt+3GP9DQ1CuekR78HlR10M9p9OB0/DJT7na -xpeG0ILD5EJt/rDiZE4OJudANCa1CInXCGNjOCd1HjPqbqjdn5lPdE2BiYBL3ZqX -KVwvvoFBuYz/6n1gBp7N1z3TLqMVvKjmJuVvw9y4AyHqnxbxLFS1 ------END CERTIFICATE----- - -# Issuer: CN=CA Disig Root R2 O=Disig a.s. -# Subject: CN=CA Disig Root R2 O=Disig a.s. -# Label: "CA Disig Root R2" -# Serial: 10572350602393338211 -# MD5 Fingerprint: 26:01:fb:d8:27:a7:17:9a:45:54:38:1a:43:01:3b:03 -# SHA1 Fingerprint: b5:61:eb:ea:a4:de:e4:25:4b:69:1a:98:a5:57:47:c2:34:c7:d9:71 -# SHA256 Fingerprint: e2:3d:4a:03:6d:7b:70:e9:f5:95:b1:42:20:79:d2:b9:1e:df:bb:1f:b6:51:a0:63:3e:aa:8a:9d:c5:f8:07:03 ------BEGIN CERTIFICATE----- -MIIFaTCCA1GgAwIBAgIJAJK4iNuwisFjMA0GCSqGSIb3DQEBCwUAMFIxCzAJBgNV -BAYTAlNLMRMwEQYDVQQHEwpCcmF0aXNsYXZhMRMwEQYDVQQKEwpEaXNpZyBhLnMu -MRkwFwYDVQQDExBDQSBEaXNpZyBSb290IFIyMB4XDTEyMDcxOTA5MTUzMFoXDTQy -MDcxOTA5MTUzMFowUjELMAkGA1UEBhMCU0sxEzARBgNVBAcTCkJyYXRpc2xhdmEx -EzARBgNVBAoTCkRpc2lnIGEucy4xGTAXBgNVBAMTEENBIERpc2lnIFJvb3QgUjIw -ggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCio8QACdaFXS1tFPbCw3Oe -NcJxVX6B+6tGUODBfEl45qt5WDza/3wcn9iXAng+a0EE6UG9vgMsRfYvZNSrXaNH -PWSb6WiaxswbP7q+sos0Ai6YVRn8jG+qX9pMzk0DIaPY0jSTVpbLTAwAFjxfGs3I -x2ymrdMxp7zo5eFm1tL7A7RBZckQrg4FY8aAamkw/dLukO8NJ9+flXP04SXabBbe -QTg06ov80egEFGEtQX6sx3dOy1FU+16SGBsEWmjGycT6txOgmLcRK7fWV8x8nhfR -yyX+hk4kLlYMeE2eARKmK6cBZW58Yh2EhN/qwGu1pSqVg8NTEQxzHQuyRpDRQjrO -QG6Vrf/GlK1ul4SOfW+eioANSW1z4nuSHsPzwfPrLgVv2RvPN3YEyLRa5Beny912 -H9AZdugsBbPWnDTYltxhh5EF5EQIM8HauQhl1K6yNg3ruji6DOWbnuuNZt2Zz9aJ -QfYEkoopKW1rOhzndX0CcQ7zwOe9yxndnWCywmZgtrEE7snmhrmaZkCo5xHtgUUD -i/ZnWejBBhG93c+AAk9lQHhcR1DIm+YfgXvkRKhbhZri3lrVx/k6RGZL5DJUfORs -nLMOPReisjQS1n6yqEm70XooQL6iFh/f5DcfEXP7kAplQ6INfPgGAVUzfbANuPT1 -rqVCV3w2EYx7XsQDnYx5nQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1Ud -DwEB/wQEAwIBBjAdBgNVHQ4EFgQUtZn4r7CU9eMg1gqtzk5WpC5uQu0wDQYJKoZI -hvcNAQELBQADggIBACYGXnDnZTPIgm7ZnBc6G3pmsgH2eDtpXi/q/075KMOYKmFM -tCQSin1tERT3nLXK5ryeJ45MGcipvXrA1zYObYVybqjGom32+nNjf7xueQgcnYqf -GopTpti72TVVsRHFqQOzVju5hJMiXn7B9hJSi+osZ7z+Nkz1uM/Rs0mSO9MpDpkb -lvdhuDvEK7Z4bLQjb/D907JedR+Zlais9trhxTF7+9FGs9K8Z7RiVLoJ92Owk6Ka -+elSLotgEqv89WBW7xBci8QaQtyDW2QOy7W81k/BfDxujRNt+3vrMNDcTa/F1bal -TFtxyegxvug4BkihGuLq0t4SOVga/4AOgnXmt8kHbA7v/zjxmHHEt38OFdAlab0i -nSvtBfZGR6ztwPDUO+Ls7pZbkBNOHlY667DvlruWIxG68kOGdGSVyCh13x01utI3 -gzhTODY7z2zp+WsO0PsE6E9312UBeIYMej4hYvF/Y3EMyZ9E26gnonW+boE+18Dr -G5gPcFw0sorMwIUY6256s/daoQe/qUKS82Ail+QUoQebTnbAjn39pCXHR+3/H3Os -zMOl6W8KjptlwlCFtaOgUxLMVYdh84GuEEZhvUQhuMI9dM9+JDX6HAcOmz0iyu8x -L4ysEr3vQCj8KWefshNPZiTEUxnpHikV7+ZtsH8tZ/3zbBt1RqPlShfppNcL ------END CERTIFICATE----- - -# Issuer: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Subject: CN=ACCVRAIZ1 O=ACCV OU=PKIACCV -# Label: "ACCVRAIZ1" -# Serial: 6828503384748696800 -# MD5 Fingerprint: d0:a0:5a:ee:05:b6:09:94:21:a1:7d:f1:b2:29:82:02 -# SHA1 Fingerprint: 93:05:7a:88:15:c6:4f:ce:88:2f:fa:91:16:52:28:78:bc:53:64:17 -# SHA256 Fingerprint: 
9a:6e:c0:12:e1:a7:da:9d:be:34:19:4d:47:8a:d7:c0:db:18:22:fb:07:1d:f1:29:81:49:6e:d1:04:38:41:13 ------BEGIN CERTIFICATE----- -MIIH0zCCBbugAwIBAgIIXsO3pkN/pOAwDQYJKoZIhvcNAQEFBQAwQjESMBAGA1UE -AwwJQUNDVlJBSVoxMRAwDgYDVQQLDAdQS0lBQ0NWMQ0wCwYDVQQKDARBQ0NWMQsw -CQYDVQQGEwJFUzAeFw0xMTA1MDUwOTM3MzdaFw0zMDEyMzEwOTM3MzdaMEIxEjAQ -BgNVBAMMCUFDQ1ZSQUlaMTEQMA4GA1UECwwHUEtJQUNDVjENMAsGA1UECgwEQUND -VjELMAkGA1UEBhMCRVMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCb -qau/YUqXry+XZpp0X9DZlv3P4uRm7x8fRzPCRKPfmt4ftVTdFXxpNRFvu8gMjmoY -HtiP2Ra8EEg2XPBjs5BaXCQ316PWywlxufEBcoSwfdtNgM3802/J+Nq2DoLSRYWo -G2ioPej0RGy9ocLLA76MPhMAhN9KSMDjIgro6TenGEyxCQ0jVn8ETdkXhBilyNpA -lHPrzg5XPAOBOp0KoVdDaaxXbXmQeOW1tDvYvEyNKKGno6e6Ak4l0Squ7a4DIrhr -IA8wKFSVf+DuzgpmndFALW4ir50awQUZ0m/A8p/4e7MCQvtQqR0tkw8jq8bBD5L/ -0KIV9VMJcRz/RROE5iZe+OCIHAr8Fraocwa48GOEAqDGWuzndN9wrqODJerWx5eH -k6fGioozl2A3ED6XPm4pFdahD9GILBKfb6qkxkLrQaLjlUPTAYVtjrs78yM2x/47 -4KElB0iryYl0/wiPgL/AlmXz7uxLaL2diMMxs0Dx6M/2OLuc5NF/1OVYm3z61PMO -m3WR5LpSLhl+0fXNWhn8ugb2+1KoS5kE3fj5tItQo05iifCHJPqDQsGH+tUtKSpa -cXpkatcnYGMN285J9Y0fkIkyF/hzQ7jSWpOGYdbhdQrqeWZ2iE9x6wQl1gpaepPl -uUsXQA+xtrn13k/c4LOsOxFwYIRKQ26ZIMApcQrAZQIDAQABo4ICyzCCAscwfQYI -KwYBBQUHAQEEcTBvMEwGCCsGAQUFBzAChkBodHRwOi8vd3d3LmFjY3YuZXMvZmls -ZWFkbWluL0FyY2hpdm9zL2NlcnRpZmljYWRvcy9yYWl6YWNjdjEuY3J0MB8GCCsG -AQUFBzABhhNodHRwOi8vb2NzcC5hY2N2LmVzMB0GA1UdDgQWBBTSh7Tj3zcnk1X2 -VuqB5TbMjB4/vTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFNKHtOPfNyeT -VfZW6oHlNsyMHj+9MIIBcwYDVR0gBIIBajCCAWYwggFiBgRVHSAAMIIBWDCCASIG -CCsGAQUFBwICMIIBFB6CARAAQQB1AHQAbwByAGkAZABhAGQAIABkAGUAIABDAGUA -cgB0AGkAZgBpAGMAYQBjAGkA8wBuACAAUgBhAO0AegAgAGQAZQAgAGwAYQAgAEEA -QwBDAFYAIAAoAEEAZwBlAG4AYwBpAGEAIABkAGUAIABUAGUAYwBuAG8AbABvAGcA -7QBhACAAeQAgAEMAZQByAHQAaQBmAGkAYwBhAGMAaQDzAG4AIABFAGwAZQBjAHQA -cgDzAG4AaQBjAGEALAAgAEMASQBGACAAUQA0ADYAMAAxADEANQA2AEUAKQAuACAA -QwBQAFMAIABlAG4AIABoAHQAdABwADoALwAvAHcAdwB3AC4AYQBjAGMAdgAuAGUA -czAwBggrBgEFBQcCARYkaHR0cDovL3d3dy5hY2N2LmVzL2xlZ2lzbGFjaW9uX2Mu -aHRtMFUGA1UdHwROMEwwSqBIoEaGRGh0dHA6Ly93d3cuYWNjdi5lcy9maWxlYWRt -aW4vQXJjaGl2b3MvY2VydGlmaWNhZG9zL3JhaXphY2N2MV9kZXIuY3JsMA4GA1Ud -DwEB/wQEAwIBBjAXBgNVHREEEDAOgQxhY2N2QGFjY3YuZXMwDQYJKoZIhvcNAQEF -BQADggIBAJcxAp/n/UNnSEQU5CmH7UwoZtCPNdpNYbdKl02125DgBS4OxnnQ8pdp -D70ER9m+27Up2pvZrqmZ1dM8MJP1jaGo/AaNRPTKFpV8M9xii6g3+CfYCS0b78gU -JyCpZET/LtZ1qmxNYEAZSUNUY9rizLpm5U9EelvZaoErQNV/+QEnWCzI7UiRfD+m -AM/EKXMRNt6GGT6d7hmKG9Ww7Y49nCrADdg9ZuM8Db3VlFzi4qc1GwQA9j9ajepD -vV+JHanBsMyZ4k0ACtrJJ1vnE5Bc5PUzolVt3OAJTS+xJlsndQAJxGJ3KQhfnlms -tn6tn1QwIgPBHnFk/vk4CpYY3QIUrCPLBhwepH2NDd4nQeit2hW3sCPdK6jT2iWH -7ehVRE2I9DZ+hJp4rPcOVkkO1jMl1oRQQmwgEh0q1b688nCBpHBgvgW1m54ERL5h -I6zppSSMEYCUWqKiuUnSwdzRp+0xESyeGabu4VXhwOrPDYTkF7eifKXeVSUG7szA -h1xA2syVP1XgNce4hL60Xc16gwFy7ofmXx2utYXGJt/mwZrpHgJHnyqobalbz+xF -d3+YJ5oyXSrjhO7FmGYvliAd3djDJ9ew+f7Zfc3Qn48LFFhRny+Lwzgt3uiP1o2H -pPVWQxaZLPSkVrQ0uGE3ycJYgBugl6H8WY3pEfbRD0tVNEYqi4Y7 ------END CERTIFICATE----- - -# Issuer: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA Global Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA Global Root CA" -# Serial: 3262 -# MD5 Fingerprint: f9:03:7e:cf:e6:9e:3c:73:7a:2a:90:07:69:ff:2b:96 -# SHA1 Fingerprint: 9c:bb:48:53:f6:a4:f6:d3:52:a4:e8:32:52:55:60:13:f5:ad:af:65 -# SHA256 Fingerprint: 59:76:90:07:f7:68:5d:0f:cd:50:87:2f:9f:95:d5:75:5a:5b:2b:45:7d:81:f3:69:2b:61:0a:98:67:2f:0e:1b ------BEGIN CERTIFICATE----- -MIIFQTCCAymgAwIBAgICDL4wDQYJKoZIhvcNAQELBQAwUTELMAkGA1UEBhMCVFcx -EjAQBgNVBAoTCVRBSVdBTi1DQTEQMA4GA1UECxMHUm9vdCBDQTEcMBoGA1UEAxMT 
-VFdDQSBHbG9iYWwgUm9vdCBDQTAeFw0xMjA2MjcwNjI4MzNaFw0zMDEyMzExNTU5 -NTlaMFExCzAJBgNVBAYTAlRXMRIwEAYDVQQKEwlUQUlXQU4tQ0ExEDAOBgNVBAsT -B1Jvb3QgQ0ExHDAaBgNVBAMTE1RXQ0EgR2xvYmFsIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCwBdvI64zEbooh745NnHEKH1Jw7W2CnJfF -10xORUnLQEK1EjRsGcJ0pDFfhQKX7EMzClPSnIyOt7h52yvVavKOZsTuKwEHktSz -0ALfUPZVr2YOy+BHYC8rMjk1Ujoog/h7FsYYuGLWRyWRzvAZEk2tY/XTP3VfKfCh -MBwqoJimFb3u/Rk28OKRQ4/6ytYQJ0lM793B8YVwm8rqqFpD/G2Gb3PpN0Wp8DbH -zIh1HrtsBv+baz4X7GGqcXzGHaL3SekVtTzWoWH1EfcFbx39Eb7QMAfCKbAJTibc -46KokWofwpFFiFzlmLhxpRUZyXx1EcxwdE8tmx2RRP1WKKD+u4ZqyPpcC1jcxkt2 -yKsi2XMPpfRaAok/T54igu6idFMqPVMnaR1sjjIsZAAmY2E2TqNGtz99sy2sbZCi -laLOz9qC5wc0GZbpuCGqKX6mOL6OKUohZnkfs8O1CWfe1tQHRvMq2uYiN2DLgbYP -oA/pyJV/v1WRBXrPPRXAb94JlAGD1zQbzECl8LibZ9WYkTunhHiVJqRaCPgrdLQA -BDzfuBSO6N+pjWxnkjMdwLfS7JLIvgm/LCkFbwJrnu+8vyq8W8BQj0FwcYeyTbcE -qYSjMq+u7msXi7Kx/mzhkIyIqJdIzshNy/MGz19qCkKxHh53L46g5pIOBvwFItIm -4TFRfTLcDwIDAQABoyMwITAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zANBgkqhkiG9w0BAQsFAAOCAgEAXzSBdu+WHdXltdkCY4QWwa6gcFGn90xHNcgL -1yg9iXHZqjNB6hQbbCEAwGxCGX6faVsgQt+i0trEfJdLjbDorMjupWkEmQqSpqsn -LhpNgb+E1HAerUf+/UqdM+DyucRFCCEK2mlpc3INvjT+lIutwx4116KD7+U4x6WF -H6vPNOw/KP4M8VeGTslV9xzU2KV9Bnpv1d8Q34FOIWWxtuEXeZVFBs5fzNxGiWNo -RI2T9GRwoD2dKAXDOXC4Ynsg/eTb6QihuJ49CcdP+yz4k3ZB3lLg4VfSnQO8d57+ -nile98FRYB/e2guyLXW3Q0iT5/Z5xoRdgFlglPx4mI88k1HtQJAH32RjJMtOcQWh -15QaiDLxInQirqWm2BJpTGCjAu4r7NRjkgtevi92a6O2JryPA9gK8kxkRr05YuWW -6zRjESjMlfGt7+/cgFhI6Uu46mWs6fyAtbXIRfmswZ/ZuepiiI7E8UuDEq3mi4TW -nsLrgxifarsbJGAzcMzs9zLzXNl5fe+epP7JI8Mk7hWSsT2RTyaGvWZzJBPqpK5j -wa19hAM8EHiGG3njxPPyBJUgriOCxLM6AGK/5jYk4Ve6xx6QddVfP5VhK8E7zeWz -aGHQRiapIVJpLesux+t3zqY6tQMzT3bR51xUAV3LePTJDL/PEo4XLSNolOer/qmy -KwbQBM0= ------END CERTIFICATE----- - -# Issuer: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Subject: CN=TeliaSonera Root CA v1 O=TeliaSonera -# Label: "TeliaSonera Root CA v1" -# Serial: 199041966741090107964904287217786801558 -# MD5 Fingerprint: 37:41:49:1b:18:56:9a:26:f5:ad:c2:66:fb:40:a5:4c -# SHA1 Fingerprint: 43:13:bb:96:f1:d5:86:9b:c1:4e:6a:92:f6:cf:f6:34:69:87:82:37 -# SHA256 Fingerprint: dd:69:36:fe:21:f8:f0:77:c1:23:a1:a5:21:c1:22:24:f7:22:55:b7:3e:03:a7:26:06:93:e8:a2:4b:0f:a3:89 ------BEGIN CERTIFICATE----- -MIIFODCCAyCgAwIBAgIRAJW+FqD3LkbxezmCcvqLzZYwDQYJKoZIhvcNAQEFBQAw -NzEUMBIGA1UECgwLVGVsaWFTb25lcmExHzAdBgNVBAMMFlRlbGlhU29uZXJhIFJv -b3QgQ0EgdjEwHhcNMDcxMDE4MTIwMDUwWhcNMzIxMDE4MTIwMDUwWjA3MRQwEgYD -VQQKDAtUZWxpYVNvbmVyYTEfMB0GA1UEAwwWVGVsaWFTb25lcmEgUm9vdCBDQSB2 -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMK+6yfwIaPzaSZVfp3F -VRaRXP3vIb9TgHot0pGMYzHw7CTww6XScnwQbfQ3t+XmfHnqjLWCi65ItqwA3GV1 -7CpNX8GH9SBlK4GoRz6JI5UwFpB/6FcHSOcZrr9FZ7E3GwYq/t75rH2D+1665I+X -Z75Ljo1kB1c4VWk0Nj0TSO9P4tNmHqTPGrdeNjPUtAa9GAH9d4RQAEX1jF3oI7x+ -/jXh7VB7qTCNGdMJjmhnXb88lxhTuylixcpecsHHltTbLaC0H2kD7OriUPEMPPCs -81Mt8Bz17Ww5OXOAFshSsCPN4D7c3TxHoLs1iuKYaIu+5b9y7tL6pe0S7fyYGKkm -dtwoSxAgHNN/Fnct7W+A90m7UwW7XWjH1Mh1Fj+JWov3F0fUTPHSiXk+TT2YqGHe -Oh7S+F4D4MHJHIzTjU3TlTazN19jY5szFPAtJmtTfImMMsJu7D0hADnJoWjiUIMu -sDor8zagrC/kb2HCUQk5PotTubtn2txTuXZZNp1D5SDgPTJghSJRt8czu90VL6R4 -pgd7gUY2BIbdeTXHlSw7sKMXNeVzH7RcWe/a6hBle3rQf5+ztCo3O3CLm1u5K7fs -slESl1MpWtTwEhDcTwK7EpIvYtQ/aUN8Ddb8WHUBiJ1YFkveupD/RwGJBmr2X7KQ -arMCpgKIv7NHfirZ1fpoeDVNAgMBAAGjPzA9MA8GA1UdEwEB/wQFMAMBAf8wCwYD -VR0PBAQDAgEGMB0GA1UdDgQWBBTwj1k4ALP1j5qWDNXr+nuqF+gTEjANBgkqhkiG -9w0BAQUFAAOCAgEAvuRcYk4k9AwI//DTDGjkk0kiP0Qnb7tt3oNmzqjMDfz1mgbl -dxSR651Be5kqhOX//CHBXfDkH1e3damhXwIm/9fH907eT/j3HEbAek9ALCI18Bmx 
-0GtnLLCo4MBANzX2hFxc469CeP6nyQ1Q6g2EdvZR74NTxnr/DlZJLo961gzmJ1Tj -TQpgcmLNkQfWpb/ImWvtxBnmq0wROMVvMeJuScg/doAmAyYp4Db29iBT4xdwNBed -Y2gea+zDTYa4EzAvXUYNR0PVG6pZDrlcjQZIrXSHX8f8MVRBE+LHIQ6e4B4N4cB7 -Q4WQxYpYxmUKeFfyxiMPAdkgS94P+5KFdSpcc41teyWRyu5FrgZLAMzTsVlQ2jqI -OylDRl6XK1TOU2+NSueW+r9xDkKLfP0ooNBIytrEgUy7onOTJsjrDNYmiLbAJM+7 -vVvrdX3pCI6GMyx5dwlppYn8s3CQh3aP0yK7Qs69cwsgJirQmz1wHiRszYd2qReW -t88NkvuOGKmYSdGe/mBEciG5Ge3C9THxOUiIkCR1VBatzvT4aRRkOfujuLpwQMcn -HL/EVlP6Y2XQ8xwOFvVrhlhNGNTkDY6lnVuR3HYkUD/GKvvZt5y11ubQ2egZixVx -SK236thZiNSQvxaz2emsWWFUyBy6ysHK4bkgTI86k4mloMy/0/Z1pHWWbVY= ------END CERTIFICATE----- - -# Issuer: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Subject: CN=T-TeleSec GlobalRoot Class 2 O=T-Systems Enterprise Services GmbH OU=T-Systems Trust Center -# Label: "T-TeleSec GlobalRoot Class 2" -# Serial: 1 -# MD5 Fingerprint: 2b:9b:9e:e4:7b:6c:1f:00:72:1a:cc:c1:77:79:df:6a -# SHA1 Fingerprint: 59:0d:2d:7d:88:4f:40:2e:61:7e:a5:62:32:17:65:cf:17:d8:94:e9 -# SHA256 Fingerprint: 91:e2:f5:78:8d:58:10:eb:a7:ba:58:73:7d:e1:54:8a:8e:ca:cd:01:45:98:bc:0b:14:3e:04:1b:17:05:25:52 ------BEGIN CERTIFICATE----- -MIIDwzCCAqugAwIBAgIBATANBgkqhkiG9w0BAQsFADCBgjELMAkGA1UEBhMCREUx -KzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnByaXNlIFNlcnZpY2VzIEdtYkgxHzAd -BgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50ZXIxJTAjBgNVBAMMHFQtVGVsZVNl -YyBHbG9iYWxSb290IENsYXNzIDIwHhcNMDgxMDAxMTA0MDE0WhcNMzMxMDAxMjM1 -OTU5WjCBgjELMAkGA1UEBhMCREUxKzApBgNVBAoMIlQtU3lzdGVtcyBFbnRlcnBy -aXNlIFNlcnZpY2VzIEdtYkgxHzAdBgNVBAsMFlQtU3lzdGVtcyBUcnVzdCBDZW50 -ZXIxJTAjBgNVBAMMHFQtVGVsZVNlYyBHbG9iYWxSb290IENsYXNzIDIwggEiMA0G -CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCqX9obX+hzkeXaXPSi5kfl82hVYAUd -AqSzm1nzHoqvNK38DcLZSBnuaY/JIPwhqgcZ7bBcrGXHX+0CfHt8LRvWurmAwhiC -FoT6ZrAIxlQjgeTNuUk/9k9uN0goOA/FvudocP05l03Sx5iRUKrERLMjfTlH6VJi -1hKTXrcxlkIF+3anHqP1wvzpesVsqXFP6st4vGCvx9702cu+fjOlbpSD8DT6Iavq -jnKgP6TeMFvvhk1qlVtDRKgQFRzlAVfFmPHmBiiRqiDFt1MmUUOyCxGVWOHAD3bZ -wI18gfNycJ5v/hqO2V81xrJvNHy+SE/iWjnX2J14np+GPgNeGYtEotXHAgMBAAGj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBS/ -WSA2AHmgoCJrjNXyYdK4LMuCSjANBgkqhkiG9w0BAQsFAAOCAQEAMQOiYQsfdOhy -NsZt+U2e+iKo4YFWz827n+qrkRk4r6p8FU3ztqONpfSO9kSpp+ghla0+AGIWiPAC -uvxhI+YzmzB6azZie60EI4RYZeLbK4rnJVM3YlNfvNoBYimipidx5joifsFvHZVw -IEoHNN/q/xWA5brXethbdXwFeilHfkCoMRN3zUA7tFFHei4R40cR3p1m0IvVVGb6 -g1XqfMIpiRvpb7PO4gWEyS8+eIVibslfwXhjdFjASBgMmTnrpMwatXlajRWc2BQN -9noHV8cigwUtPJslJj0Ys6lDfMjIq2SPDqO/nBudMNva0Bkuqjzx+zOAduTNrRlP -BSeOE6Fuwg== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot 2011 O=Atos -# Subject: CN=Atos TrustedRoot 2011 O=Atos -# Label: "Atos TrustedRoot 2011" -# Serial: 6643877497813316402 -# MD5 Fingerprint: ae:b9:c4:32:4b:ac:7f:5d:66:cc:77:94:bb:2a:77:56 -# SHA1 Fingerprint: 2b:b1:f5:3e:55:0c:1d:c5:f1:d4:e6:b7:6a:46:4b:55:06:02:ac:21 -# SHA256 Fingerprint: f3:56:be:a2:44:b7:a9:1e:b3:5d:53:ca:9a:d7:86:4a:ce:01:8e:2d:35:d5:f8:f9:6d:df:68:a6:f4:1a:a4:74 ------BEGIN CERTIFICATE----- -MIIDdzCCAl+gAwIBAgIIXDPLYixfszIwDQYJKoZIhvcNAQELBQAwPDEeMBwGA1UE -AwwVQXRvcyBUcnVzdGVkUm9vdCAyMDExMQ0wCwYDVQQKDARBdG9zMQswCQYDVQQG -EwJERTAeFw0xMTA3MDcxNDU4MzBaFw0zMDEyMzEyMzU5NTlaMDwxHjAcBgNVBAMM -FUF0b3MgVHJ1c3RlZFJvb3QgMjAxMTENMAsGA1UECgwEQXRvczELMAkGA1UEBhMC -REUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCVhTuXbyo7LjvPpvMp -Nb7PGKw+qtn4TaA+Gke5vJrf8v7MPkfoepbCJI419KkM/IL9bcFyYie96mvr54rM -VD6QUM+A1JX76LWC1BTFtqlVJVfbsVD2sGBkWXppzwO3bw2+yj5vdHLqqjAqc2K+ -SZFhyBH+DgMq92og3AIVDV4VavzjgsG1xZ1kCWyjWZgHJ8cblithdHFsQ/H3NYkQ 
-4J7sVaE3IqKHBAUsR320HLliKWYoyrfhk/WklAOZuXCFteZI6o1Q/NnezG8HDt0L
-cp2AMBYHlT8oDv3FdU9T1nSatCQujgKRz3bFmx5VdJx4IbHwLfELn8LVlhgf8FQi
-eowHAgMBAAGjfTB7MB0GA1UdDgQWBBSnpQaxLKYJYO7Rl+lwrrw7GWzbITAPBgNV
-HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFKelBrEspglg7tGX6XCuvDsZbNshMBgG
-A1UdIAQRMA8wDQYLKwYBBAGwLQMEAQEwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3
-DQEBCwUAA4IBAQAmdzTblEiGKkGdLD4GkGDEjKwLVLgfuXvTBznk+j57sj1O7Z8j
-vZfza1zv7v1Apt+hk6EKhqzvINB5Ab149xnYJDE0BAGmuhWawyfc2E8PzBhj/5kP
-DpFrdRbhIfzYJsdHt6bPWHJxfrrhTZVHO8mvbaG0weyJ9rQPOLXiZNwlz6bb65pc
-maHFCN795trV1lpFDMS3wrUU77QR/w4VtfX128a961qn8FYiqTxlVMYVqL2Gns2D
-lmh6cYGJ4Qvh6hEbaAjMaZ7snkGeRDImeuKHCnE96+RapNLbxc3G3mB/ufNPRJLv
-KrcYPqcZ2Qt9sTdBQrC6YB3y/gkRsPCHe6ed
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 1 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 1 G3"
-# Serial: 687049649626669250736271037606554624078720034195
-# MD5 Fingerprint: a4:bc:5b:3f:fe:37:9a:fa:64:f0:e2:fa:05:3d:0b:ab
-# SHA1 Fingerprint: 1b:8e:ea:57:96:29:1a:c9:39:ea:b8:0a:81:1a:73:73:c0:93:79:67
-# SHA256 Fingerprint: 8a:86:6f:d1:b2:76:b5:7e:57:8e:92:1c:65:82:8a:2b:ed:58:e9:f2:f2:88:05:41:34:b7:f1:f4:bf:c9:cc:74
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIUeFhfLq0sGUvjNwc1NBMotZbUZZMwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMSBHMzAeFw0xMjAxMTIxNzI3NDRaFw00
-MjAxMTIxNzI3NDRaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDEgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCgvlAQjunybEC0BJyFuTHK3C3kEakEPBtV
-wedYMB0ktMPvhd6MLOHBPd+C5k+tR4ds7FtJwUrVu4/sh6x/gpqG7D0DmVIB0jWe
-rNrwU8lmPNSsAgHaJNM7qAJGr6Qc4/hzWHa39g6QDbXwz8z6+cZM5cOGMAqNF341
-68Xfuw6cwI2H44g4hWf6Pser4BOcBRiYz5P1sZK0/CPTz9XEJ0ngnjybCKOLXSoh
-4Pw5qlPafX7PGglTvF0FBM+hSo+LdoINofjSxxR3W5A2B4GbPgb6Ul5jxaYA/qXp
-UhtStZI5cgMJYr2wYBZupt0lwgNm3fME0UDiTouG9G/lg6AnhF4EwfWQvTA9xO+o
-abw4m6SkltFi2mnAAZauy8RRNOoMqv8hjlmPSlzkYZqn0ukqeI1RPToV7qJZjqlc
-3sX5kCLliEVx3ZGZbHqfPT2YfF72vhZooF6uCyP8Wg+qInYtyaEQHeTTRCOQiJ/G
-KubX9ZqzWB4vMIkIG1SitZgj7Ah3HJVdYdHLiZxfokqRmu8hqkkWCKi9YSgxyXSt
-hfbZxbGL0eUQMk1fiyA6PEkfM4VZDdvLCXVDaXP7a3F98N/ETH3Goy7IlXnLc6KO
-Tk0k+17kBL5yG6YnLUlamXrXXAkgt3+UuU/xDRxeiEIbEbfnkduebPRq34wGmAOt
-zCjvpUfzUwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQUo5fW816iEOGrRZ88F2Q87gFwnMwwDQYJKoZIhvcNAQELBQAD
-ggIBABj6W3X8PnrHX3fHyt/PX8MSxEBd1DKquGrX1RUVRpgjpeaQWxiZTOOtQqOC
-MTaIzen7xASWSIsBx40Bz1szBpZGZnQdT+3Btrm0DWHMY37XLneMlhwqI2hrhVd2
-cDMT/uFPpiN3GPoajOi9ZcnPP/TJF9zrx7zABC4tRi9pZsMbj/7sPtPKlL92CiUN
-qXsCHKnQO18LwIE6PWThv6ctTr1NxNgpxiIY0MWscgKCP6o6ojoilzHdCGPDdRS5
-YCgtW2jgFqlmgiNR9etT2DGbe+m3nUvriBbP+V04ikkwj+3x6xn0dxoxGE1nVGwv
-b2X52z3sIexe9PSLymBlVNFxZPT5pqOBMzYzcfCkeF9OrYMh3jRJjehZrJ3ydlo2
-8hP0r+AJx2EqbPfgna67hkooby7utHnNkDPDs3b69fBsnQGQ+p6Q9pxyz0fawx/k
-NSBT8lTR32GDpgLiJTjehTItXnOQUl1CxM49S+H5GYQd1aJQzEH7QRTDvdbJWqNj
-ZgKAvQU6O0ec7AAmTPWIUb+oI38YB7AL7YsmoWTTYUrrXJ/es69nA7Mf3W1daWhp
-q1467HxpvMc7hU6eFbm0FU/DlXpY18ls6Wy58yljXrQs8C097Vpl4KlbQMJImYFt
-nh8GKjwStIsPm6Ik8KaN1nrgS7ZklmOVhMJKzRwuJIczYOXD
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 2 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 2 G3"
-# Serial: 390156079458959257446133169266079962026824725800
-# MD5 Fingerprint: af:0c:86:6e:bf:40:2d:7f:0b:3e:12:50:ba:12:3d:06
-# SHA1 Fingerprint: 09:3c:61:f3:8b:8b:dc:7d:55:df:75:38:02:05:00:e1:25:f5:c8:36
-# SHA256 Fingerprint: 8f:e4:fb:0a:f9:3a:4d:0d:67:db:0b:eb:b2:3e:37:c7:1b:f3:25:dc:bc:dd:24:0e:a0:4d:af:58:b4:7e:18:40
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIURFc0JFuBiZs18s64KztbpybwdSgwDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMiBHMzAeFw0xMjAxMTIxODU5MzJaFw00
-MjAxMTIxODU5MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDIgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQChriWyARjcV4g/Ruv5r+LrI3HimtFhZiFf
-qq8nUeVuGxbULX1QsFN3vXg6YOJkApt8hpvWGo6t/x8Vf9WVHhLL5hSEBMHfNrMW
-n4rjyduYNM7YMxcoRvynyfDStNVNCXJJ+fKH46nafaF9a7I6JaltUkSs+L5u+9ym
-c5GQYaYDFCDy54ejiK2toIz/pgslUiXnFgHVy7g1gQyjO/Dh4fxaXc6AcW34Sas+
-O7q414AB+6XrW7PFXmAqMaCvN+ggOp+oMiwMzAkd056OXbxMmO7FGmh77FOm6RQ1
-o9/NgJ8MSPsc9PG/Srj61YxxSscfrf5BmrODXfKEVu+lV0POKa2Mq1W/xPtbAd0j
-IaFYAI7D0GoT7RPjEiuA3GfmlbLNHiJuKvhB1PLKFAeNilUSxmn1uIZoL1NesNKq
-IcGY5jDjZ1XHm26sGahVpkUG0CM62+tlXSoREfA7T8pt9DTEceT/AFr2XK4jYIVz
-8eQQsSWu1ZK7E8EM4DnatDlXtas1qnIhO4M15zHfeiFuuDIIfR0ykRVKYnLP43eh
-vNURG3YBZwjgQQvD6xVu+KQZ2aKrr+InUlYrAoosFCT5v0ICvybIxo/gbjh9Uy3l
-7ZizlWNof/k19N+IxWA1ksB8aRxhlRbQ694Lrz4EEEVlWFA4r0jyWbYW8jwNkALG
-cC4BrTwV1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB
-BjAdBgNVHQ4EFgQU7edvdlq/YOxJW8ald7tyFnGbxD0wDQYJKoZIhvcNAQELBQAD
-ggIBAJHfgD9DCX5xwvfrs4iP4VGyvD11+ShdyLyZm3tdquXK4Qr36LLTn91nMX66
-AarHakE7kNQIXLJgapDwyM4DYvmL7ftuKtwGTTwpD4kWilhMSA/ohGHqPHKmd+RC
-roijQ1h5fq7KpVMNqT1wvSAZYaRsOPxDMuHBR//47PERIjKWnML2W2mWeyAMQ0Ga
-W/ZZGYjeVYg3UQt4XAoeo0L9x52ID8DyeAIkVJOviYeIyUqAHerQbj5hLja7NQ4n
-lv1mNDthcnPxFlxHBlRJAHpYErAK74X9sbgzdWqTHBLmYF5vHX/JHyPLhGGfHoJE
-+V+tYlUkmlKY7VHnoX6XOuYvHxHaU4AshZ6rNRDbIl9qxV6XU/IyAgkwo1jwDQHV
-csaxfGl7w/U2Rcxhbl5MlMVerugOXou/983g7aEOGzPuVBj+D77vfoRrQ+NwmNtd
-dbINWQeFFSM51vHfqSYP1kjHs6Yi9TM3WpVHn3u6GBVv/9YUZINJ0gpnIdsPNWNg
-KCLjsZWDzYWm3S8P52dSbrsvhXz1SnPnxT7AvSESBT/8twNJAlvIJebiVDj1eYeM
-HVOyToV7BjjHLPj4sHKNJeV3UvQDHEimUF+IIDBu8oJDqz2XhOdT+yHBTw8imoa4
-WSr2Rz0ZiC3oheGe7IUIarFsNMkd7EgrO3jtZsSOeWmD3n+M
------END CERTIFICATE-----
-
-# Issuer: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Subject: CN=QuoVadis Root CA 3 G3 O=QuoVadis Limited
-# Label: "QuoVadis Root CA 3 G3"
-# Serial: 268090761170461462463995952157327242137089239581
-# MD5 Fingerprint: df:7d:b9:ad:54:6f:68:a1:df:89:57:03:97:43:b0:d7
-# SHA1 Fingerprint: 48:12:bd:92:3c:a8:c4:39:06:e7:30:6d:27:96:e6:a4:cf:22:2e:7d
-# SHA256 Fingerprint: 88:ef:81:de:20:2e:b0:18:45:2e:43:f8:64:72:5c:ea:5f:bd:1f:c2:d9:d2:05:73:07:09:c5:d8:b8:69:0f:46
------BEGIN CERTIFICATE-----
-MIIFYDCCA0igAwIBAgIULvWbAiin23r/1aOp7r0DoM8Sah0wDQYJKoZIhvcNAQEL
-BQAwSDELMAkGA1UEBhMCQk0xGTAXBgNVBAoTEFF1b1ZhZGlzIExpbWl0ZWQxHjAc
-BgNVBAMTFVF1b1ZhZGlzIFJvb3QgQ0EgMyBHMzAeFw0xMjAxMTIyMDI2MzJaFw00
-MjAxMTIyMDI2MzJaMEgxCzAJBgNVBAYTAkJNMRkwFwYDVQQKExBRdW9WYWRpcyBM
-aW1pdGVkMR4wHAYDVQQDExVRdW9WYWRpcyBSb290IENBIDMgRzMwggIiMA0GCSqG
-SIb3DQEBAQUAA4ICDwAwggIKAoICAQCzyw4QZ47qFJenMioKVjZ/aEzHs286IxSR
-/xl/pcqs7rN2nXrpixurazHb+gtTTK/FpRp5PIpM/6zfJd5O2YIyC0TeytuMrKNu
-FoM7pmRLMon7FhY4futD4tN0SsJiCnMK3UmzV9KwCoWdcTzeo8vAMvMBOSBDGzXR
-U7Ox7sWTaYI+FrUoRqHe6okJ7UO4BUaKhvVZR74bbwEhELn9qdIoyhA5CcoTNs+c
-ra1AdHkrAj80//ogaX3T7mH1urPnMNA3I4ZyYUUpSFlob3emLoG+B01vr87ERROR
-FHAGjx+f+IdpsQ7vw4kZ6+ocYfx6bIrc1gMLnia6Et3UVDmrJqMz6nWB2i3ND0/k
-A9HvFZcba5DFApCTZgIhsUfei5pKgLlVj7WiL8DWM2fafsSntARE60f75li59wzw
-eyuxwHApw0BiLTtIadwjPEjrewl5qW3aqDCYz4ByA4imW0aucnl8CAMhZa634Ryl
-sSqiMd5mBPfAdOhx3v89WcyWJhKLhZVXGqtrdQtEPREoPHtht+KPZ0/l7DxMYIBp
-VzgeAVuNVejH38DMdyM0SXV89pgR6y3e7UEuFAUCf+D+IOs15xGsIs5XPd7JMG0Q
-A4XN8f+MFrXBsj6IbGB/kE+V9/YtrQE5BwT6dYB9v0lQ7e/JxHwc64B+27bQ3RP+ -ydOc17KXqQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -BjAdBgNVHQ4EFgQUxhfQvKjqAkPyGwaZXSuQILnXnOQwDQYJKoZIhvcNAQELBQAD -ggIBADRh2Va1EodVTd2jNTFGu6QHcrxfYWLopfsLN7E8trP6KZ1/AvWkyaiTt3px -KGmPc+FSkNrVvjrlt3ZqVoAh313m6Tqe5T72omnHKgqwGEfcIHB9UqM+WXzBusnI -FUBhynLWcKzSt/Ac5IYp8M7vaGPQtSCKFWGafoaYtMnCdvvMujAWzKNhxnQT5Wvv -oxXqA/4Ti2Tk08HS6IT7SdEQTXlm66r99I0xHnAUrdzeZxNMgRVhvLfZkXdxGYFg -u/BYpbWcC/ePIlUnwEsBbTuZDdQdm2NnL9DuDcpmvJRPpq3t/O5jrFc/ZSXPsoaP -0Aj/uHYUbt7lJ+yreLVTubY/6CD50qi+YUbKh4yE8/nxoGibIh6BJpsQBJFxwAYf -3KDTuVan45gtf4Od34wrnDKOMpTwATwiKp9Dwi7DmDkHOHv8XgBCH/MyJnmDhPbl -8MFREsALHgQjDFSlTC9JxUrRtm5gDWv8a4uFJGS3iQ6rJUdbPM9+Sb3H6QrG2vd+ -DhcI00iX0HGS8A85PjRqHH3Y8iKuu2n0M7SmSFXRDw4m6Oy2Cy2nhTXN/VnIn9HN -PlopNLk9hM6xZdRZkZFWdSHBd575euFgndOtBBj0fOtek49TSiIp+EgrPk2GrFt/ -ywaZWWDYWGWVjUTR939+J399roD1B0y2PpxxVJkES/1Y+Zj0 ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G2" -# Serial: 15385348160840213938643033620894905419 -# MD5 Fingerprint: 92:38:b9:f8:63:24:82:65:2c:57:33:e6:fe:81:8f:9d -# SHA1 Fingerprint: a1:4b:48:d9:43:ee:0a:0e:40:90:4f:3c:e0:a4:c0:91:93:51:5d:3f -# SHA256 Fingerprint: 7d:05:eb:b6:82:33:9f:8c:94:51:ee:09:4e:eb:fe:fa:79:53:a1:14:ed:b2:f4:49:49:45:2f:ab:7d:2f:c1:85 ------BEGIN CERTIFICATE----- -MIIDljCCAn6gAwIBAgIQC5McOtY5Z+pnI7/Dr5r0SzANBgkqhkiG9w0BAQsFADBl -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv -b3QgRzIwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQG -EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl -cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzIwggEi -MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDZ5ygvUj82ckmIkzTz+GoeMVSA -n61UQbVH35ao1K+ALbkKz3X9iaV9JPrjIgwrvJUXCzO/GU1BBpAAvQxNEP4Htecc -biJVMWWXvdMX0h5i89vqbFCMP4QMls+3ywPgym2hFEwbid3tALBSfK+RbLE4E9Hp -EgjAALAcKxHad3A2m67OeYfcgnDmCXRwVWmvo2ifv922ebPynXApVfSr/5Vh88lA -bx3RvpO704gqu52/clpWcTs/1PPRCv4o76Pu2ZmvA9OPYLfykqGxvYmJHzDNw6Yu -YjOuFgJ3RFrngQo8p0Quebg/BLxcoIfhG69Rjs3sLPr4/m3wOnyqi+RnlTGNAgMB -AAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQW -BBTOw0q5mVXyuNtgv6l+vVa1lzan1jANBgkqhkiG9w0BAQsFAAOCAQEAyqVVjOPI -QW5pJ6d1Ee88hjZv0p3GeDgdaZaikmkuOGybfQTUiaWxMTeKySHMq2zNixya1r9I -0jJmwYrA8y8678Dj1JGG0VDjA9tzd29KOVPt3ibHtX2vK0LRdWLjSisCx1BL4Gni -lmwORGYQRI+tBev4eaymG+g3NJ1TyWGqolKvSnAWhsI6yLETcDbYz+70CjTVW0z9 -B5yiutkBclzzTcHdDrEcDcRjvq30FPuJ7KJBDkzMyFdA0G4Dqs0MjomZmWzwPDCv -ON9vvKO+KSAnq3T/EyJ43pdSVR6DtVQgA+6uwE9W3jfMw3+qBCe703e4YtsXfJwo -IhNzbM8m9Yop5w== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Assured ID Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Assured ID Root G3" -# Serial: 15459312981008553731928384953135426796 -# MD5 Fingerprint: 7c:7f:65:31:0c:81:df:8d:ba:3e:99:e2:5c:ad:6e:fb -# SHA1 Fingerprint: f5:17:a2:4f:9a:48:c6:c9:f8:a2:00:26:9f:dc:0f:48:2c:ab:30:89 -# SHA256 Fingerprint: 7e:37:cb:8b:4c:47:09:0c:ab:36:55:1b:a6:f4:5d:b8:40:68:0f:ba:16:6a:95:2d:b1:00:71:7f:43:05:3f:c2 ------BEGIN CERTIFICATE----- -MIICRjCCAc2gAwIBAgIQC6Fa+h3foLVJRK/NJKBs7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3Qg 
-RzMwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBlMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgRzMwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAAQZ57ysRGXtzbg/WPuNsVepRC0FFfLvC/8QdJ+1YlJf -Zn4f5dwbRXkLzMZTCp2NXQLZqVneAlr2lSoOjThKiknGvMYDOAdfVdp+CW7if17Q -RSAPWXYQ1qAk8C3eNvJsKTmjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ -BAQDAgGGMB0GA1UdDgQWBBTL0L2p4ZgFUaFNN6KDec6NHSrkhDAKBggqhkjOPQQD -AwNnADBkAjAlpIFFAmsSS3V0T8gj43DydXLefInwz5FyYZ5eEJJZVrmDxxDnOOlY -JjZ91eQ0hjkCMHw2U/Aw5WJjOpnitqM7mzT6HtoQknFekROn3aRukswy1vUhZscv -6pZjamVFkpUBtA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G2 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G2" -# Serial: 4293743540046975378534879503202253541 -# MD5 Fingerprint: e4:a6:8a:c8:54:ac:52:42:46:0a:fd:72:48:1b:2a:44 -# SHA1 Fingerprint: df:3c:24:f9:bf:d6:66:76:1b:26:80:73:fe:06:d1:cc:8d:4f:82:a4 -# SHA256 Fingerprint: cb:3c:cb:b7:60:31:e5:e0:13:8f:8d:d3:9a:23:f9:de:47:ff:c3:5e:43:c1:14:4c:ea:27:d4:6a:5a:b1:cb:5f ------BEGIN CERTIFICATE----- -MIIDjjCCAnagAwIBAgIQAzrx5qcRqaC7KGSxHQn65TANBgkqhkiG9w0BAQsFADBh -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBH -MjAeFw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVT -MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j -b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEcyMIIBIjANBgkqhkiG -9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuzfNNNx7a8myaJCtSnX/RrohCgiN9RlUyfuI -2/Ou8jqJkTx65qsGGmvPrC3oXgkkRLpimn7Wo6h+4FR1IAWsULecYxpsMNzaHxmx -1x7e/dfgy5SDN67sH0NO3Xss0r0upS/kqbitOtSZpLYl6ZtrAGCSYP9PIUkY92eQ -q2EGnI/yuum06ZIya7XzV+hdG82MHauVBJVJ8zUtluNJbd134/tJS7SsVQepj5Wz -tCO7TG1F8PapspUwtP1MVYwnSlcUfIKdzXOS0xZKBgyMUNGPHgm+F6HmIcr9g+UQ -vIOlCsRnKPZzFBQ9RnbDhxSJITRNrw9FDKZJobq7nMWxM4MphQIDAQABo0IwQDAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAdBgNVHQ4EFgQUTiJUIBiV -5uNu5g/6+rkS7QYXjzkwDQYJKoZIhvcNAQELBQADggEBAGBnKJRvDkhj6zHd6mcY -1Yl9PMWLSn/pvtsrF9+wX3N3KjITOYFnQoQj8kVnNeyIv/iPsGEMNKSuIEyExtv4 -NeF22d+mQrvHRAiGfzZ0JFrabA0UWTW98kndth/Jsw1HKj2ZL7tcu7XUIOGZX1NG -Fdtom/DzMNU+MeKNhJ7jitralj41E6Vf8PlwUHBHQRFXGU7Aj64GxJUTFy8bJZ91 -8rGOmaFvE7FBcf6IKshPECBV1/MUReXgRPTqh5Uykw7+U0b6LJ3/iyK5S9kJRaTe -pLiaWN0bfVKfjllDiIGknibVb63dDcY3fe0Dkhvld1927jyNxF1WW6LZZm6zNTfl -MrY= ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Global Root G3 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Global Root G3" -# Serial: 7089244469030293291760083333884364146 -# MD5 Fingerprint: f5:5d:a4:50:a5:fb:28:7e:1e:0f:0d:cc:96:57:56:ca -# SHA1 Fingerprint: 7e:04:de:89:6a:3e:66:6d:00:e6:87:d3:3f:fa:d9:3b:e8:3d:34:9e -# SHA256 Fingerprint: 31:ad:66:48:f8:10:41:38:c7:38:f3:9e:a4:32:01:33:39:3e:3a:18:cc:02:29:6e:f9:7c:2a:c9:ef:67:31:d0 ------BEGIN CERTIFICATE----- -MIICPzCCAcWgAwIBAgIQBVVWvPJepDU1w6QP1atFcjAKBggqhkjOPQQDAzBhMQsw -CQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cu -ZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBHMzAe -Fw0xMzA4MDExMjAwMDBaFw0zODAxMTUxMjAwMDBaMGExCzAJBgNVBAYTAlVTMRUw -EwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5jb20x -IDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IEczMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAE3afZu4q4C/sLfyHS8L6+c/MzXRq8NOrexpu80JX28MzQC7phW1FG -fp4tn+6OYwwX7Adw9c+ELkCDnOg/QW07rdOkFFk2eJ0DQ+4QE2xy3q6Ip6FrtUPO 
-Z9wj/wMco+I+o0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBhjAd -BgNVHQ4EFgQUs9tIpPmhxdiuNkHMEWNpYim8S8YwCgYIKoZIzj0EAwMDaAAwZQIx -AK288mw/EkrRLTnDCgmXc/SINoyIJ7vmiI1Qhadj+Z4y3maTD/HMsQmP3Wyr+mt/ -oAIwOWZbwmSNuJ5Q3KjVSaLtx9zRSX8XAbjIho9OjIgrqJqpisXRAL34VOKa5Vt8 -sycX ------END CERTIFICATE----- - -# Issuer: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Subject: CN=DigiCert Trusted Root G4 O=DigiCert Inc OU=www.digicert.com -# Label: "DigiCert Trusted Root G4" -# Serial: 7451500558977370777930084869016614236 -# MD5 Fingerprint: 78:f2:fc:aa:60:1f:2f:b4:eb:c9:37:ba:53:2e:75:49 -# SHA1 Fingerprint: dd:fb:16:cd:49:31:c9:73:a2:03:7d:3f:c8:3a:4d:7d:77:5d:05:e4 -# SHA256 Fingerprint: 55:2f:7b:dc:f1:a7:af:9e:6c:e6:72:01:7f:4f:12:ab:f7:72:40:c7:8e:76:1a:c2:03:d1:d9:d2:0a:c8:99:88 ------BEGIN CERTIFICATE----- -MIIFkDCCA3igAwIBAgIQBZsbV56OITLiOQe9p3d1XDANBgkqhkiG9w0BAQwFADBi -MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 -d3cuZGlnaWNlcnQuY29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3Qg -RzQwHhcNMTMwODAxMTIwMDAwWhcNMzgwMTE1MTIwMDAwWjBiMQswCQYDVQQGEwJV -UzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNlcnQu -Y29tMSEwHwYDVQQDExhEaWdpQ2VydCBUcnVzdGVkIFJvb3QgRzQwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQC/5pBzaN675F1KPDAiMGkz7MKnJS7JIT3y -ithZwuEppz1Yq3aaza57G4QNxDAf8xukOBbrVsaXbR2rsnnyyhHS5F/WBTxSD1If -xp4VpX6+n6lXFllVcq9ok3DCsrp1mWpzMpTREEQQLt+C8weE5nQ7bXHiLQwb7iDV -ySAdYyktzuxeTsiT+CFhmzTrBcZe7FsavOvJz82sNEBfsXpm7nfISKhmV1efVFiO -DCu3T6cw2Vbuyntd463JT17lNecxy9qTXtyOj4DatpGYQJB5w3jHtrHEtWoYOAMQ -jdjUN6QuBX2I9YI+EJFwq1WCQTLX2wRzKm6RAXwhTNS8rhsDdV14Ztk6MUSaM0C/ -CNdaSaTC5qmgZ92kJ7yhTzm1EVgX9yRcRo9k98FpiHaYdj1ZXUJ2h4mXaXpI8OCi -EhtmmnTK3kse5w5jrubU75KSOp493ADkRSWJtppEGSt+wJS00mFt6zPZxd9LBADM -fRyVw4/3IbKyEbe7f/LVjHAsQWCqsWMYRJUadmJ+9oCw++hkpjPRiQfhvbfmQ6QY -uKZ3AeEPlAwhHbJUKSWJbOUOUlFHdL4mrLZBdd56rF+NP8m800ERElvlEFDrMcXK -chYiCd98THU/Y+whX8QgUWtvsauGi0/C1kVfnSD8oR7FwI+isX4KJpn15GkvmB0t -9dmpsh3lGwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIB -hjAdBgNVHQ4EFgQU7NfjgtJxXWRM3y5nP+e6mK4cD08wDQYJKoZIhvcNAQEMBQAD -ggIBALth2X2pbL4XxJEbw6GiAI3jZGgPVs93rnD5/ZpKmbnJeFwMDF/k5hQpVgs2 -SV1EY+CtnJYYZhsjDT156W1r1lT40jzBQ0CuHVD1UvyQO7uYmWlrx8GnqGikJ9yd -+SeuMIW59mdNOj6PWTkiU0TryF0Dyu1Qen1iIQqAyHNm0aAFYF/opbSnr6j3bTWc -fFqK1qI4mfN4i/RN0iAL3gTujJtHgXINwBQy7zBZLq7gcfJW5GqXb5JQbZaNaHqa -sjYUegbyJLkJEVDXCLG4iXqEI2FCKeWjzaIgQdfRnGTZ6iahixTXTBmyUEFxPT9N -cCOGDErcgdLMMpSEDQgJlxxPwO5rIHQw0uA5NBCFIRUBCOhVMt5xSdkoF1BN5r5N -0XWs0Mr7QbhDparTwwVETyw2m+L64kW4I1NsBm9nVX9GtUw/bihaeSbSpKhil9Ie -4u1Ki7wb/UdKDd9nZn6yW0HQO+T0O/QEY+nvwlQAUaCKKsnOeMzV6ocEGLPOr0mI -r/OSmbaz5mEP0oUA51Aa5BuVnRmhuZyxm7EAHu/QD09CbMkKvO5D+jpxpchNJqU1 -/YldvIViHTLSoCtU7ZpXwdv6EM8Zt4tKG48BtieVU+i2iW1bvGjUI+iLUaJW+fCm -gKDWHrO8Dw9TdSmq6hN35N6MgSGtBxBHEa2HPQfRdbzP82Z+ ------END CERTIFICATE----- - -# Issuer: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Subject: CN=COMODO RSA Certification Authority O=COMODO CA Limited -# Label: "COMODO RSA Certification Authority" -# Serial: 101909084537582093308941363524873193117 -# MD5 Fingerprint: 1b:31:b0:71:40:36:cc:14:36:91:ad:c4:3e:fd:ec:18 -# SHA1 Fingerprint: af:e5:d2:44:a8:d1:19:42:30:ff:47:9f:e2:f8:97:bb:cd:7a:8c:b4 -# SHA256 Fingerprint: 52:f0:e1:c4:e5:8e:c6:29:29:1b:60:31:7f:07:46:71:b8:5d:7e:a8:0d:5b:07:27:34:63:53:4b:32:b4:02:34 ------BEGIN CERTIFICATE----- -MIIF2DCCA8CgAwIBAgIQTKr5yttjb+Af907YWwOGnTANBgkqhkiG9w0BAQwFADCB -hTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
-A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNV -BAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMTE5 -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgT -EkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMR -Q09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBSU0EgQ2VydGlmaWNh -dGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCR -6FSS0gpWsawNJN3Fz0RndJkrN6N9I3AAcbxT38T6KhKPS38QVr2fcHK3YX/JSw8X -pz3jsARh7v8Rl8f0hj4K+j5c+ZPmNHrZFGvnnLOFoIJ6dq9xkNfs/Q36nGz637CC -9BR++b7Epi9Pf5l/tfxnQ3K9DADWietrLNPtj5gcFKt+5eNu/Nio5JIk2kNrYrhV -/erBvGy2i/MOjZrkm2xpmfh4SDBF1a3hDTxFYPwyllEnvGfDyi62a+pGx8cgoLEf -Zd5ICLqkTqnyg0Y3hOvozIFIQ2dOciqbXL1MGyiKXCJ7tKuY2e7gUYPDCUZObT6Z -+pUX2nwzV0E8jVHtC7ZcryxjGt9XyD+86V3Em69FmeKjWiS0uqlWPc9vqv9JWL7w -qP/0uK3pN/u6uPQLOvnoQ0IeidiEyxPx2bvhiWC4jChWrBQdnArncevPDt09qZah -SL0896+1DSJMwBGB7FY79tOi4lu3sgQiUpWAk2nojkxl8ZEDLXB0AuqLZxUpaVIC -u9ffUGpVRr+goyhhf3DQw6KqLCGqR84onAZFdr+CGCe01a60y1Dma/RMhnEw6abf -Fobg2P9A3fvQQoh/ozM6LlweQRGBY84YcWsr7KaKtzFcOmpH4MN5WdYgGq/yapiq -crxXStJLnbsQ/LBMQeXtHT1eKJ2czL+zUdqnR+WEUwIDAQABo0IwQDAdBgNVHQ4E -FgQUu69+Aj36pvE8hI6t7jiY7NkyMtQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB -/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAArx1UaEt65Ru2yyTUEUAJNMnMvl -wFTPoCWOAvn9sKIN9SCYPBMtrFaisNZ+EZLpLrqeLppysb0ZRGxhNaKatBYSaVqM -4dc+pBroLwP0rmEdEBsqpIt6xf4FpuHA1sj+nq6PK7o9mfjYcwlYRm6mnPTXJ9OV -2jeDchzTc+CiR5kDOF3VSXkAKRzH7JsgHAckaVd4sjn8OoSgtZx8jb8uk2Intzna -FxiuvTwJaP+EmzzV1gsD41eeFPfR60/IvYcjt7ZJQ3mFXLrrkguhxuhoqEwWsRqZ -CuhTLJK7oQkYdQxlqHvLI7cawiiFwxv/0Cti76R7CZGYZ4wUAc1oBmpjIXUDgIiK -boHGhfKppC3n9KUkEEeDys30jXlYsQab5xoq2Z0B15R97QNKyvDb6KkBPvVWmcke -jkk9u+UJueBPSZI9FoJAzMxZxuY67RIuaTxslbH9qh17f4a+Hg4yRvv7E491f0yL -S0Zj/gA0QHDBw7mh3aZw4gSzQbzpgJHqZJx64SIDqZxubw5lT2yHh17zbqD5daWb -QOhTsiedSrnAdyGN/4fy3ryM7xfft0kL0fJuMAsaDk527RH89elWsn2/x20Kk4yl -0MC2Hb46TpSi125sC8KKfPog88Tk5c0NqMuRkrF8hey1FGlmDoLnzc7ILaZRfyHB -NVOFBkpdn627G190 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust RSA Certification Authority O=The USERTRUST Network -# Label: "USERTrust RSA Certification Authority" -# Serial: 2645093764781058787591871645665788717 -# MD5 Fingerprint: 1b:fe:69:d1:91:b7:19:33:a3:72:a8:0f:e1:55:e5:b5 -# SHA1 Fingerprint: 2b:8f:1b:57:33:0d:bb:a2:d0:7a:6c:51:f7:0e:e9:0d:da:b9:ad:8e -# SHA256 Fingerprint: e7:93:c9:b0:2f:d8:aa:13:e2:1c:31:22:8a:cc:b0:81:19:64:3b:74:9c:89:89:64:b1:74:6d:46:c3:d4:cb:d2 ------BEGIN CERTIFICATE----- -MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB -iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl -cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV -BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw -MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV -BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU -aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B -3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY -tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ -Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 -VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT -79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 -c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 
-Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l -c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee -UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE -Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd -BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G -A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF -Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO -VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 -ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs -8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR -iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze -Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ -XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ -qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB -VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB -L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG -jjxDah2nGN59PRbxYvnKkKj9 ------END CERTIFICATE----- - -# Issuer: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Subject: CN=USERTrust ECC Certification Authority O=The USERTRUST Network -# Label: "USERTrust ECC Certification Authority" -# Serial: 123013823720199481456569720443997572134 -# MD5 Fingerprint: fa:68:bc:d9:b5:7f:ad:fd:c9:1d:06:83:28:cc:24:c1 -# SHA1 Fingerprint: d1:cb:ca:5d:b2:d5:2a:7f:69:3b:67:4d:e5:f0:5a:1d:0c:95:7d:f0 -# SHA256 Fingerprint: 4f:f4:60:d5:4b:9c:86:da:bf:bc:fc:57:12:e0:40:0d:2b:ed:3f:bc:4d:4f:bd:aa:86:e0:6a:dc:d2:a9:ad:7a ------BEGIN CERTIFICATE----- -MIICjzCCAhWgAwIBAgIQXIuZxVqUxdJxVt7NiYDMJjAKBggqhkjOPQQDAzCBiDEL -MAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNl -eSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMT -JVVTRVJUcnVzdCBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAwMjAx -MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNVBAgT -Ck5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVUaGUg -VVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBFQ0MgQ2VydGlm -aWNhdGlvbiBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQarFRaqflo -I+d61SRvU8Za2EurxtW20eZzca7dnNYMYf3boIkDuAUU7FfO7l0/4iGzzvfUinng -o4N+LZfQYcTxmdwlkWOrfzCjtHDix6EznPO/LlxTsV+zfTJ/ijTjeXmjQjBAMB0G -A1UdDgQWBBQ64QmG1M8ZwpZ2dEl23OA1xmNjmjAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjA2Z6EWCNzklwBBHU6+4WMB -zzuqQhFkoJ2UOQIReVx7Hfpkue4WQrO/isIJxOzksU0CMQDpKmFHjFJKS04YcPbW -RNZu9YO6bVi9JNlWSOrvxKJGgYhqOkbRqZtNyWHa0V1Xahg= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R5 -# Label: "GlobalSign ECC Root CA - R5" -# Serial: 32785792099990507226680698011560947931244 -# MD5 Fingerprint: 9f:ad:3b:1c:02:1e:8a:ba:17:74:38:81:0c:a2:bc:08 -# SHA1 Fingerprint: 1f:24:c6:30:cd:a4:18:ef:20:69:ff:ad:4f:dd:5f:46:3a:1b:69:aa -# SHA256 Fingerprint: 17:9f:bc:14:8a:3d:d0:0f:d2:4e:a1:34:58:cc:43:bf:a7:f5:9c:81:82:d7:83:a5:13:f6:eb:ec:10:0c:89:24 ------BEGIN CERTIFICATE----- -MIICHjCCAaSgAwIBAgIRYFlJ4CYuu1X5CneKcflK2GwwCgYIKoZIzj0EAwMwUDEk -MCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBDQSAtIFI1MRMwEQYDVQQKEwpH -bG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWduMB4XDTEyMTExMzAwMDAwMFoX -DTM4MDExOTAzMTQwN1owUDEkMCIGA1UECxMbR2xvYmFsU2lnbiBFQ0MgUm9vdCBD -QSAtIFI1MRMwEQYDVQQKEwpHbG9iYWxTaWduMRMwEQYDVQQDEwpHbG9iYWxTaWdu -MHYwEAYHKoZIzj0CAQYFK4EEACIDYgAER0UOlvt9Xb/pOdEh+J8LttV7HpI6SFkc 
-8GIxLcB6KP4ap1yztsyX50XUWPrRd21DosCHZTQKH3rd6zwzocWdTaRvQZU4f8ke -hOvRnkmSh5SHDDqFSmafnVmTTZdhBoZKo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUPeYpSJvqB8ohREom3m7e0oPQn1kwCgYI -KoZIzj0EAwMDaAAwZQIxAOVpEslu28YxuglB4Zf4+/2a4n0Sye18ZNPLBSWLVtmg -515dTguDnFt2KaAJJiFqYgIwcdK1j1zqO+F4CYWodZI7yFz9SO8NdCKoCOJuxUnO -xwy8p2Fp8fc74SrL+SvzZpA3 ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Commercial Root CA 1 O=IdenTrust -# Label: "IdenTrust Commercial Root CA 1" -# Serial: 13298821034946342390520003877796839426 -# MD5 Fingerprint: b3:3e:77:73:75:ee:a0:d3:e3:7e:49:63:49:59:bb:c7 -# SHA1 Fingerprint: df:71:7e:aa:4a:d9:4e:c9:55:84:99:60:2d:48:de:5f:bc:f0:3a:25 -# SHA256 Fingerprint: 5d:56:49:9b:e4:d2:e0:8b:cf:ca:d0:8a:3e:38:72:3d:50:50:3b:de:70:69:48:e4:2f:55:60:30:19:e5:28:ae ------BEGIN CERTIFICATE----- -MIIFYDCCA0igAwIBAgIQCgFCgAAAAUUjyES1AAAAAjANBgkqhkiG9w0BAQsFADBK -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScwJQYDVQQDEx5JZGVu -VHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwHhcNMTQwMTE2MTgxMjIzWhcNMzQw -MTE2MTgxMjIzWjBKMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MScw -JQYDVQQDEx5JZGVuVHJ1c3QgQ29tbWVyY2lhbCBSb290IENBIDEwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQCnUBneP5k91DNG8W9RYYKyqU+PZ4ldhNlT -3Qwo2dfw/66VQ3KZ+bVdfIrBQuExUHTRgQ18zZshq0PirK1ehm7zCYofWjK9ouuU -+ehcCuz/mNKvcbO0U59Oh++SvL3sTzIwiEsXXlfEU8L2ApeN2WIrvyQfYo3fw7gp -S0l4PJNgiCL8mdo2yMKi1CxUAGc1bnO/AljwpN3lsKImesrgNqUZFvX9t++uP0D1 -bVoE/c40yiTcdCMbXTMTEl3EASX2MN0CXZ/g1Ue9tOsbobtJSdifWwLziuQkkORi -T0/Br4sOdBeo0XKIanoBScy0RnnGF7HamB4HWfp1IYVl3ZBWzvurpWCdxJ35UrCL -vYf5jysjCiN2O/cz4ckA82n5S6LgTrx+kzmEB/dEcH7+B1rlsazRGMzyNeVJSQjK -Vsk9+w8YfYs7wRPCTY/JTw436R+hDmrfYi7LNQZReSzIJTj0+kuniVyc0uMNOYZK -dHzVWYfCP04MXFL0PfdSgvHqo6z9STQaKPNBiDoT7uje/5kdX7rL6B7yuVBgwDHT -c+XvvqDtMwt0viAgxGds8AgDelWAf0ZOlqf0Hj7h9tgJ4TNkK2PXMl6f+cB7D3hv -l7yTmvmcEpB4eoCHFddydJxVdHixuuFucAS6T6C6aMN7/zHwcz09lCqxC0EOoP5N -iGVreTO01wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB -/zAdBgNVHQ4EFgQU7UQZwNPwBovupHu+QucmVMiONnYwDQYJKoZIhvcNAQELBQAD -ggIBAA2ukDL2pkt8RHYZYR4nKM1eVO8lvOMIkPkp165oCOGUAFjvLi5+U1KMtlwH -6oi6mYtQlNeCgN9hCQCTrQ0U5s7B8jeUeLBfnLOic7iPBZM4zY0+sLj7wM+x8uwt -LRvM7Kqas6pgghstO8OEPVeKlh6cdbjTMM1gCIOQ045U8U1mwF10A0Cj7oV+wh93 -nAbowacYXVKV7cndJZ5t+qntozo00Fl72u1Q8zW/7esUTTHHYPTa8Yec4kjixsU3 -+wYQ+nVZZjFHKdp2mhzpgq7vmrlR94gjmmmVYjzlVYA211QC//G5Xc7UI2/YRYRK -W2XviQzdFKcgyxilJbQN+QHwotL0AMh0jqEqSI5l2xPE4iUXfeu+h1sXIFRRk0pT -AwvsXcoz7WL9RccvW9xYoIA55vrX/hMUpu09lEpCdNTDd1lzzY9GvlU47/rokTLq -l1gEIt44w8y8bckzOmoKaT+gyOpyj4xjhiO9bTyWnpXgSUyqorkqG5w2gXjtw+hG -4iZZRHUe2XWJUc0QhJ1hYMtd+ZciTY6Y5uN/9lu7rs3KSoFrXgvzUeF0K+l+J6fZ -mUlO+KWA2yUPHGNiiskzZ2s8EIPGrd6ozRaOjfAHN3Gf8qv8QfXBi+wAN10J5U6A -7/qxXDgGpRtK4dw4LTzcqx+QGtVKnO7RcGzM7vRX+Bi6hG6H ------END CERTIFICATE----- - -# Issuer: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Subject: CN=IdenTrust Public Sector Root CA 1 O=IdenTrust -# Label: "IdenTrust Public Sector Root CA 1" -# Serial: 13298821034946342390521976156843933698 -# MD5 Fingerprint: 37:06:a5:b0:fc:89:9d:ba:f4:6b:8c:1a:64:cd:d5:ba -# SHA1 Fingerprint: ba:29:41:60:77:98:3f:f4:f3:ef:f2:31:05:3b:2e:ea:6d:4d:45:fd -# SHA256 Fingerprint: 30:d0:89:5a:9a:44:8a:26:20:91:63:55:22:d1:f5:20:10:b5:86:7a:ca:e1:2c:78:ef:95:8f:d4:f4:38:9f:2f ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCgFCgAAAAUUjz0Z8AAAAAjANBgkqhkiG9w0BAQsFADBN -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0MSowKAYDVQQDEyFJZGVu -VHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwHhcNMTQwMTE2MTc1MzMyWhcN 
-MzQwMTE2MTc1MzMyWjBNMQswCQYDVQQGEwJVUzESMBAGA1UEChMJSWRlblRydXN0
-MSowKAYDVQQDEyFJZGVuVHJ1c3QgUHVibGljIFNlY3RvciBSb290IENBIDEwggIi
-MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2IpT8pEiv6EdrCvsnduTyP4o7
-ekosMSqMjbCpwzFrqHd2hCa2rIFCDQjrVVi7evi8ZX3yoG2LqEfpYnYeEe4IFNGy
-RBb06tD6Hi9e28tzQa68ALBKK0CyrOE7S8ItneShm+waOh7wCLPQ5CQ1B5+ctMlS
-bdsHyo+1W/CD80/HLaXIrcuVIKQxKFdYWuSNG5qrng0M8gozOSI5Cpcu81N3uURF
-/YTLNiCBWS2ab21ISGHKTN9T0a9SvESfqy9rg3LvdYDaBjMbXcjaY8ZNzaxmMc3R
-3j6HEDbhuaR672BQssvKplbgN6+rNBM5Jeg5ZuSYeqoSmJxZZoY+rfGwyj4GD3vw
-EUs3oERte8uojHH01bWRNszwFcYr3lEXsZdMUD2xlVl8BX0tIdUAvwFnol57plzy
-9yLxkA2T26pEUWbMfXYD62qoKjgZl3YNa4ph+bz27nb9cCvdKTz4Ch5bQhyLVi9V
-GxyhLrXHFub4qjySjmm2AcG1hp2JDws4lFTo6tyePSW8Uybt1as5qsVATFSrsrTZ
-2fjXctscvG29ZV/viDUqZi/u9rNl8DONfJhBaUYPQxxp+pu10GFqzcpL2UyQRqsV
-WaFHVCkugyhfHMKiq3IXAAaOReyL4jM9f9oZRORicsPfIsbyVtTdX5Vy7W1f90gD
-W/3FKqD2cyOEEBsB5wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/
-BAUwAwEB/zAdBgNVHQ4EFgQU43HgntinQtnbcZFrlJPrw6PRFKMwDQYJKoZIhvcN
-AQELBQADggIBAEf63QqwEZE4rU1d9+UOl1QZgkiHVIyqZJnYWv6IAcVYpZmxI1Qj
-t2odIFflAWJBF9MJ23XLblSQdf4an4EKwt3X9wnQW3IV5B4Jaj0z8yGa5hV+rVHV
-DRDtfULAj+7AmgjVQdZcDiFpboBhDhXAuM/FSRJSzL46zNQuOAXeNf0fb7iAaJg9
-TaDKQGXSc3z1i9kKlT/YPyNtGtEqJBnZhbMX73huqVjRI9PHE+1yJX9dsXNw0H8G
-lwmEKYBhHfpe/3OsoOOJuBxxFcbeMX8S3OFtm6/n6J91eEyrRjuazr8FGF1NFTwW
-mhlQBJqymm9li1JfPFgEKCXAZmExfrngdbkaqIHWchezxQMxNRF4eKLg6TCMf4Df
-WN88uieW4oA0beOY02QnrEh+KHdcxiVhJfiFDGX6xDIvpZgF5PgLZxYWxoK4Mhn5
-+bl53B/N66+rDt0b20XkeucC4pVd/GnwU2lhlXV5C15V5jgclKlZM57IcXR5f1GJ
-tshquDDIajjDbp7hNxbqBWJMWxJH7ae0s1hWx0nzfxJoCTFx8G34Tkf71oXuxVhA
-GaQdp/lLQzfcaFpPz+vCZHTetBXZ9FRUGi8c15dxVJCO2SCdUyt/q4/i6jC8UDfv
-8Ue1fXwsBOxonbRJRBD0ckscZOf85muQ3Wl9af0AVqW3rLatt8o+Ae+c
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - G2 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2009 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - G2"
-# Serial: 1246989352
-# MD5 Fingerprint: 4b:e2:c9:91:96:65:0c:f4:0e:5a:93:92:a0:0a:fe:b2
-# SHA1 Fingerprint: 8c:f4:27:fd:79:0c:3a:d1:66:06:8d:e8:1e:57:ef:bb:93:22:72:d4
-# SHA256 Fingerprint: 43:df:57:74:b0:3e:7f:ef:5f:e4:0d:93:1a:7b:ed:f1:bb:2e:6b:42:73:8c:4e:6d:38:41:10:3d:3a:a7:f3:39
------BEGIN CERTIFICATE-----
-MIIEPjCCAyagAwIBAgIESlOMKDANBgkqhkiG9w0BAQsFADCBvjELMAkGA1UEBhMC
-VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50
-cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3Qs
-IEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVz
-dCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRzIwHhcNMDkwNzA3MTcy
-NTU0WhcNMzAxMjA3MTc1NTU0WjCBvjELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUVu
-dHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3d3cuZW50cnVzdC5uZXQvbGVnYWwt
-dGVybXMxOTA3BgNVBAsTMChjKSAyMDA5IEVudHJ1c3QsIEluYy4gLSBmb3IgYXV0
-aG9yaXplZCB1c2Ugb25seTEyMDAGA1UEAxMpRW50cnVzdCBSb290IENlcnRpZmlj
-YXRpb24gQXV0aG9yaXR5IC0gRzIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK
-AoIBAQC6hLZy254Ma+KZ6TABp3bqMriVQRrJ2mFOWHLP/vaCeb9zYQYKpSfYs1/T
-RU4cctZOMvJyig/3gxnQaoCAAEUesMfnmr8SVycco2gvCoe9amsOXmXzHHfV1IWN
-cCG0szLni6LVhjkCsbjSR87kyUnEO6fe+1R9V77w6G7CebI6C1XiUJgWMhNcL3hW
-wcKUs/Ja5CeanyTXxuzQmyWC48zCxEXFjJd6BmsqEZ+pCm5IO2/b1BEZQvePB7/1
-U1+cPvQXLOZprE4yTGJ36rfo5bs0vBmLrpxR57d+tVOxMyLlbc9wPBr64ptntoP0
-jaWvYkxN4FisZDQSA/i2jZRjJKRxAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAP
-BgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRqciZ60B7vfec7aVHUbI2fkBJmqzAN
-BgkqhkiG9w0BAQsFAAOCAQEAeZ8dlsa2eT8ijYfThwMEYGprmi5ZiXMRrEPR9RP/
-jTkrwPK9T3CMqS/qF8QLVJ7UG5aYMzyorWKiAHarWWluBh1+xLlEjZivEtRh2woZ
-Rkfz6/djwUAFQKXSt/S1mja/qYh2iARVBCuch38aNzx+LaUa2NSJXsq9rD1s2G2v
-1fN2D807iDginWyTmsQ9v4IbZT+mD12q/OWyFcq1rca8PdCE6OoGcrBNOTJ4vz4R
-nAuknZoh8/CbCzB428Hch0P+vGOaysXCHMnHjf87ElgI5rY97HosTvuDls4MPGmH
-VHOkc8KT/1EQrBVUAdj8BbGJoX90g5pJ19xOe4pIb4tF9g==
------END CERTIFICATE-----
-
-# Issuer: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Subject: CN=Entrust Root Certification Authority - EC1 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2012 Entrust, Inc. - for authorized use only
-# Label: "Entrust Root Certification Authority - EC1"
-# Serial: 51543124481930649114116133369
-# MD5 Fingerprint: b6:7e:1d:f0:58:c5:49:6c:24:3b:3d:ed:98:18:ed:bc
-# SHA1 Fingerprint: 20:d8:06:40:df:9b:25:f5:12:25:3a:11:ea:f7:59:8a:eb:14:b5:47
-# SHA256 Fingerprint: 02:ed:0e:b2:8c:14:da:45:16:5c:56:67:91:70:0d:64:51:d7:fb:56:f0:b2:ab:1d:3b:8e:b0:70:e5:6e:df:f5
------BEGIN CERTIFICATE-----
-MIIC+TCCAoCgAwIBAgINAKaLeSkAAAAAUNCR+TAKBggqhkjOPQQDAzCBvzELMAkG
-A1UEBhMCVVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xKDAmBgNVBAsTH1NlZSB3
-d3cuZW50cnVzdC5uZXQvbGVnYWwtdGVybXMxOTA3BgNVBAsTMChjKSAyMDEyIEVu
-dHJ1c3QsIEluYy4gLSBmb3IgYXV0aG9yaXplZCB1c2Ugb25seTEzMDEGA1UEAxMq
-RW50cnVzdCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IC0gRUMxMB4XDTEy
-MTIxODE1MjUzNloXDTM3MTIxODE1NTUzNlowgb8xCzAJBgNVBAYTAlVTMRYwFAYD
-VQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1c3QubmV0
-L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxMiBFbnRydXN0LCBJbmMuIC0g
-Zm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMzAxBgNVBAMTKkVudHJ1c3QgUm9vdCBD
-ZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEVDMTB2MBAGByqGSM49AgEGBSuBBAAi
-A2IABIQTydC6bUF74mzQ61VfZgIaJPRbiWlH47jCffHyAsWfoPZb1YsGGYZPUxBt
-ByQnoaD41UcZYUx9ypMn6nQM72+WCf5j7HBdNq1nd67JnXxVRDqiY1Ef9eNi1KlH
-Bz7MIKNCMEAwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0O
-BBYEFLdj5xrdjekIplWDpOBqUEFlEUJJMAoGCCqGSM49BAMDA2cAMGQCMGF52OVC
-R98crlOZF7ZvHH3hvxGU0QOIdeSNiaSKd0bebWHvAvX7td/M/k7//qnmpwIwW5nX
-hTcGtXsI/esni0qU+eH6p44mCOh8kmhtc9hvJqwhAriZtyZBWyVgrtBIGu4G
------END CERTIFICATE-----
-
-# Issuer: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Subject: CN=CFCA EV ROOT O=China Financial Certification Authority
-# Label: "CFCA EV ROOT"
-# Serial: 407555286
-# MD5 Fingerprint: 74:e1:b6:ed:26:7a:7a:44:30:33:94:ab:7b:27:81:30
-# SHA1 Fingerprint: e2:b8:29:4b:55:84:ab:6b:58:c2:90:46:6c:ac:3f:b8:39:8f:84:83
-# SHA256 Fingerprint: 5c:c3:d7:8e:4e:1d:5e:45:54:7a:04:e6:87:3e:64:f9:0c:f9:53:6d:1c:cc:2e:f8:00:f3:55:c4:c5:fd:70:fd
------BEGIN CERTIFICATE-----
-MIIFjTCCA3WgAwIBAgIEGErM1jANBgkqhkiG9w0BAQsFADBWMQswCQYDVQQGEwJD
-TjEwMC4GA1UECgwnQ2hpbmEgRmluYW5jaWFsIENlcnRpZmljYXRpb24gQXV0aG9y
-aXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJPT1QwHhcNMTIwODA4MDMwNzAxWhcNMjkx
-MjMxMDMwNzAxWjBWMQswCQYDVQQGEwJDTjEwMC4GA1UECgwnQ2hpbmEgRmluYW5j
-aWFsIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MRUwEwYDVQQDDAxDRkNBIEVWIFJP
-T1QwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDXXWvNED8fBVnVBU03
-sQ7smCuOFR36k0sXgiFxEFLXUWRwFsJVaU2OFW2fvwwbwuCjZ9YMrM8irq93VCpL
-TIpTUnrD7i7es3ElweldPe6hL6P3KjzJIx1qqx2hp/Hz7KDVRM8Vz3IvHWOX6Jn5
-/ZOkVIBMUtRSqy5J35DNuF++P96hyk0g1CXohClTt7GIH//62pCfCqktQT+x8Rgp
-7hZZLDRJGqgG16iI0gNyejLi6mhNbiyWZXvKWfry4t3uMCz7zEasxGPrb382KzRz
-EpR/38wmnvFyXVBlWY9ps4deMm/DGIq1lY+wejfeWkU7xzbh72fROdOXW3NiGUgt
-hxwG+3SYIElz8AXSG7Ggo7cbcNOIabla1jj0Ytwli3i/+Oh+uFzJlU9fpy25IGvP
-a931DfSCt/SyZi4QKPaXWnuWFo8BGS1sbn85WAZkgwGDg8NNkt0yxoekN+kWzqot
-aK8KgWU6cMGbrU1tVMoqLUuFG7OA5nBFDWteNfB/O7ic5ARwiRIlk9oKmSJgamNg
-TnYGmE69g60dWIolhdLHZR4tjsbftsbhf4oEIRUpdPA+nJCdDC7xij5aqgwJHsfV
-PKPtl8MeNPo4+QgO48BdK4PRVmrJtqhUUy54Mmc9gn900PvhtgVguXDbjgv5E1hv
-cWAQUhC5wUEJ73IfZzF4/5YFjQIDAQABo2MwYTAfBgNVHSMEGDAWgBTj/i39KNAL
-tbq2osS/BqoFjJP7LzAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAd
-BgNVHQ4EFgQU4/4t/SjQC7W6tqLEvwaqBYyT+y8wDQYJKoZIhvcNAQELBQADggIB
-ACXGumvrh8vegjmWPfBEp2uEcwPenStPuiB/vHiyz5ewG5zz13ku9Ui20vsXiObT
-ej/tUxPQ4i9qecsAIyjmHjdXNYmEwnZPNDatZ8POQQaIxffu2Bq41gt/UP+TqhdL
-jOztUmCypAbqTuv0axn96/Ua4CUqmtzHQTb3yHQFhDmVOdYLO6Qn+gjYXB74BGBS
-ESgoA//vU2YApUo0FmZ8/Qmkrp5nGm9BC2sGE5uPhnEFtC+NiWYzKXZUmhH4J/qy
-P5Hgzg0b8zAarb8iXRvTvyUFTeGSGn+ZnzxEk8rUQElsgIfXBDrDMlI1Dlb4pd19 -xIsNER9Tyx6yF7Zod1rg1MvIB671Oi6ON7fQAUtDKXeMOZePglr4UeWJoBjnaH9d -Ci77o0cOPaYjesYBx4/IXr9tgFa+iiS6M+qf4TIRnvHST4D2G0CvOJ4RUHlzEhLN -5mydLIhyPDCBBpEi6lmt2hkuIsKNuYyH4Ga8cyNfIWRjgEj1oDwYPZTISEEdQLpe -/v5WOaHIz16eGWRGENoXkbcFgKyLmZJ956LYBws2J+dIeWCKw9cTXPhyQN9Ky8+Z -AAoACxGV2lZFA4gKn2fQ1XmxqI1AbQ3CekD6819kR5LLU7m7Wc5P/dAVUwHY3+vZ -5nbv0CO7O6l5s9UCKc2Jo5YPSjXnTkLAdc0Hz+Ys63su ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GB CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE WISeKey Global Root GB CA" -# Serial: 157768595616588414422159278966750757568 -# MD5 Fingerprint: a4:eb:b9:61:28:2e:b7:2f:98:b0:35:26:90:99:51:1d -# SHA1 Fingerprint: 0f:f9:40:76:18:d3:d7:6a:4b:98:f0:a8:35:9e:0c:fd:27:ac:cc:ed -# SHA256 Fingerprint: 6b:9c:08:e8:6e:b0:f7:67:cf:ad:65:cd:98:b6:21:49:e5:49:4a:67:f5:84:5e:7b:d1:ed:01:9f:27:b8:6b:d6 ------BEGIN CERTIFICATE----- -MIIDtTCCAp2gAwIBAgIQdrEgUnTwhYdGs/gjGvbCwDANBgkqhkiG9w0BAQsFADBt -MQswCQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUg -Rm91bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9i -YWwgUm9vdCBHQiBDQTAeFw0xNDEyMDExNTAwMzJaFw0zOTEyMDExNTEwMzFaMG0x -CzAJBgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBG -b3VuZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2Jh -bCBSb290IEdCIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2Be3 -HEokKtaXscriHvt9OO+Y9bI5mE4nuBFde9IllIiCFSZqGzG7qFshISvYD06fWvGx -WuR51jIjK+FTzJlFXHtPrby/h0oLS5daqPZI7H17Dc0hBt+eFf1Biki3IPShehtX -1F1Q/7pn2COZH8g/497/b1t3sWtuuMlk9+HKQUYOKXHQuSP8yYFfTvdv37+ErXNk -u7dCjmn21HYdfp2nuFeKUWdy19SouJVUQHMD9ur06/4oQnc/nSMbsrY9gBQHTC5P -99UKFg29ZkM3fiNDecNAhvVMKdqOmq0NpQSHiB6F4+lT1ZvIiwNjeOvgGUpuuy9r -M2RYk61pv48b74JIxwIDAQABo1EwTzALBgNVHQ8EBAMCAYYwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUNQ/INmNe4qPs+TtmFc5RUuORmj0wEAYJKwYBBAGCNxUB -BAMCAQAwDQYJKoZIhvcNAQELBQADggEBAEBM+4eymYGQfp3FsLAmzYh7KzKNbrgh -cViXfa43FK8+5/ea4n32cZiZBKpDdHij40lhPnOMTZTg+XHEthYOU3gf1qKHLwI5 -gSk8rxWYITD+KJAAjNHhy/peyP34EEY7onhCkRd0VQreUGdNZtGn//3ZwLWoo4rO -ZvUPQ82nK1d7Y0Zqqi5S2PTt4W2tKZB4SLrhI6qjiey1q5bAtEuiHZeeevJuQHHf -aPFlTc58Bd9TZaml8LGXBHAVRgOY1NK/VLSgWH1Sb9pWJmLU2NuJMW8c8CLC02Ic -Nc1MaRVUGpCY3useX8p3x8uOPUNpnJpY0CQ73xtAln41rYHHTnG6iBM= ------END CERTIFICATE----- - -# Issuer: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. -# Subject: CN=SZAFIR ROOT CA2 O=Krajowa Izba Rozliczeniowa S.A. 
-# Label: "SZAFIR ROOT CA2" -# Serial: 357043034767186914217277344587386743377558296292 -# MD5 Fingerprint: 11:64:c1:89:b0:24:b1:8c:b1:07:7e:89:9e:51:9e:99 -# SHA1 Fingerprint: e2:52:fa:95:3f:ed:db:24:60:bd:6e:28:f3:9c:cc:cf:5e:b3:3f:de -# SHA256 Fingerprint: a1:33:9d:33:28:1a:0b:56:e5:57:d3:d3:2b:1c:e7:f9:36:7e:b0:94:bd:5f:a7:2a:7e:50:04:c8:de:d7:ca:fe ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUPopdB+xV0jLVt+O2XwHrLdzk1uQwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCUEwxKDAmBgNVBAoMH0tyYWpvd2EgSXpiYSBSb3psaWN6 -ZW5pb3dhIFMuQS4xGDAWBgNVBAMMD1NaQUZJUiBST09UIENBMjAeFw0xNTEwMTkw -NzQzMzBaFw0zNTEwMTkwNzQzMzBaMFExCzAJBgNVBAYTAlBMMSgwJgYDVQQKDB9L -cmFqb3dhIEl6YmEgUm96bGljemVuaW93YSBTLkEuMRgwFgYDVQQDDA9TWkFGSVIg -Uk9PVCBDQTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC3vD5QqEvN -QLXOYeeWyrSh2gwisPq1e3YAd4wLz32ohswmUeQgPYUM1ljj5/QqGJ3a0a4m7utT -3PSQ1hNKDJA8w/Ta0o4NkjrcsbH/ON7Dui1fgLkCvUqdGw+0w8LBZwPd3BucPbOw -3gAeqDRHu5rr/gsUvTaE2g0gv/pby6kWIK05YO4vdbbnl5z5Pv1+TW9NL++IDWr6 -3fE9biCloBK0TXC5ztdyO4mTp4CEHCdJckm1/zuVnsHMyAHs6A6KCpbns6aH5db5 -BSsNl0BwPLqsdVqc1U2dAgrSS5tmS0YHF2Wtn2yIANwiieDhZNRnvDF5YTy7ykHN -XGoAyDw4jlivAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBQuFqlKGLXLzPVvUPMjX/hd56zwyDANBgkqhkiG9w0BAQsF -AAOCAQEAtXP4A9xZWx126aMqe5Aosk3AM0+qmrHUuOQn/6mWmc5G4G18TKI4pAZw -8PRBEew/R40/cof5O/2kbytTAOD/OblqBw7rHRz2onKQy4I9EYKL0rufKq8h5mOG -nXkZ7/e7DDWQw4rtTw/1zBLZpD67oPwglV9PJi8RI4NOdQcPv5vRtB3pEAT+ymCP -oky4rc/hkA/NrgrHXXu3UNLUYfrVFdvXn4dRVOul4+vJhaAlIDf7js4MNIThPIGy -d05DpYhfhmehPea0XGG2Ptv+tyjFogeutcrKjSoS75ftwjCkySp6+/NNIxuZMzSg -LvWpCz/UXeHPhJ/iGcJfitYgHuNztw== ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Network CA 2 O=Unizeto Technologies S.A. 
OU=Certum Certification Authority -# Label: "Certum Trusted Network CA 2" -# Serial: 44979900017204383099463764357512596969 -# MD5 Fingerprint: 6d:46:9e:d9:25:6d:08:23:5b:5e:74:7d:1e:27:db:f2 -# SHA1 Fingerprint: d3:dd:48:3e:2b:bf:4c:05:e8:af:10:f5:fa:76:26:cf:d3:dc:30:92 -# SHA256 Fingerprint: b6:76:f2:ed:da:e8:77:5c:d3:6c:b0:f6:3c:d1:d4:60:39:61:f4:9e:62:65:ba:01:3a:2f:03:07:b6:d0:b8:04 ------BEGIN CERTIFICATE----- -MIIF0jCCA7qgAwIBAgIQIdbQSk8lD8kyN/yqXhKN6TANBgkqhkiG9w0BAQ0FADCB -gDELMAkGA1UEBhMCUEwxIjAgBgNVBAoTGVVuaXpldG8gVGVjaG5vbG9naWVzIFMu -QS4xJzAlBgNVBAsTHkNlcnR1bSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTEkMCIG -A1UEAxMbQ2VydHVtIFRydXN0ZWQgTmV0d29yayBDQSAyMCIYDzIwMTExMDA2MDgz -OTU2WhgPMjA0NjEwMDYwODM5NTZaMIGAMQswCQYDVQQGEwJQTDEiMCAGA1UEChMZ -VW5pemV0byBUZWNobm9sb2dpZXMgUy5BLjEnMCUGA1UECxMeQ2VydHVtIENlcnRp -ZmljYXRpb24gQXV0aG9yaXR5MSQwIgYDVQQDExtDZXJ0dW0gVHJ1c3RlZCBOZXR3 -b3JrIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC9+Xj45tWA -DGSdhhuWZGc/IjoedQF97/tcZ4zJzFxrqZHmuULlIEub2pt7uZld2ZuAS9eEQCsn -0+i6MLs+CRqnSZXvK0AkwpfHp+6bJe+oCgCXhVqqndwpyeI1B+twTUrWwbNWuKFB -OJvR+zF/j+Bf4bE/D44WSWDXBo0Y+aomEKsq09DRZ40bRr5HMNUuctHFY9rnY3lE -fktjJImGLjQ/KUxSiyqnwOKRKIm5wFv5HdnnJ63/mgKXwcZQkpsCLL2puTRZCr+E -Sv/f/rOf69me4Jgj7KZrdxYq28ytOxykh9xGc14ZYmhFV+SQgkK7QtbwYeDBoz1m -o130GO6IyY0XRSmZMnUCMe4pJshrAua1YkV/NxVaI2iJ1D7eTiew8EAMvE0Xy02i -sx7QBlrd9pPPV3WZ9fqGGmd4s7+W/jTcvedSVuWz5XV710GRBdxdaeOVDUO5/IOW -OZV7bIBaTxNyxtd9KXpEulKkKtVBRgkg/iKgtlswjbyJDNXXcPiHUv3a76xRLgez -Tv7QCdpw75j6VuZt27VXS9zlLCUVyJ4ueE742pyehizKV/Ma5ciSixqClnrDvFAS -adgOWkaLOusm+iPJtrCBvkIApPjW/jAux9JG9uWOdf3yzLnQh1vMBhBgu4M1t15n -3kfsmUjxpKEV/q2MYo45VU85FrmxY53/twIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBS2oVQ5AsOgP46KvPrU+Bym0ToO/TAOBgNVHQ8BAf8EBAMC -AQYwDQYJKoZIhvcNAQENBQADggIBAHGlDs7k6b8/ONWJWsQCYftMxRQXLYtPU2sQ -F/xlhMcQSZDe28cmk4gmb3DWAl45oPePq5a1pRNcgRRtDoGCERuKTsZPpd1iHkTf -CVn0W3cLN+mLIMb4Ck4uWBzrM9DPhmDJ2vuAL55MYIR4PSFk1vtBHxgP58l1cb29 -XN40hz5BsA72udY/CROWFC/emh1auVbONTqwX3BNXuMp8SMoclm2q8KMZiYcdywm -djWLKKdpoPk79SPdhRB0yZADVpHnr7pH1BKXESLjokmUbOe3lEu6LaTaM4tMpkT/ -WjzGHWTYtTHkpjx6qFcL2+1hGsvxznN3Y6SHb0xRONbkX8eftoEq5IVIeVheO/jb -AoJnwTnbw3RLPTYe+SmTiGhbqEQZIfCn6IENLOiTNrQ3ssqwGyZ6miUfmpqAnksq -P/ujmv5zMnHCnsZy4YpoJ/HkD7TETKVhk/iXEAcqMCWpuchxuO9ozC1+9eB+D4Ko -b7a6bINDd82Kkhehnlt4Fj1F4jNy3eFmypnTycUm/Q1oBEauttmbjL4ZvrHG8hnj -XALKLNhvSgfZyTXaQHXyxKcZb55CEJh15pWLYLztxRLXis7VmFxWlgPF7ncGNf/P -5O4/E2Hu29othfDNrp2yGAlFw5Khchf8R7agCyzxxN5DaAhqXzvwdmP7zAYspsbi -DrW5viSP ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: ca:ff:e2:db:03:d9:cb:4b:e9:0f:ad:84:fd:7b:18:ce -# SHA1 Fingerprint: 01:0c:06:95:a6:98:19:14:ff:bf:5f:c6:b0:b6:95:ea:29:e9:12:a6 -# SHA256 Fingerprint: a0:40:92:9a:02:ce:53:b4:ac:f4:f2:ff:c6:98:1c:e4:49:6f:75:5e:6d:45:fe:0b:2a:69:2b:cd:52:52:3f:36 ------BEGIN CERTIFICATE----- -MIIGCzCCA/OgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBpjELMAkGA1UEBhMCR1Ix -DzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5k -IFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxQDA+BgNVBAMT -N0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgUm9v -dENBIDIwMTUwHhcNMTUwNzA3MTAxMTIxWhcNNDAwNjMwMTAxMTIxWjCBpjELMAkG -A1UEBhMCR1IxDzANBgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNh -ZGVtaWMgYW5kIFJlc2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkx -QDA+BgNVBAMTN0hlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1 -dGlvbnMgUm9vdENBIDIwMTUwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC -AQDC+Kk/G4n8PDwEXT2QNrCROnk8ZlrvbTkBSRq0t89/TSNTt5AA4xMqKKYx8ZEA -4yjsriFBzh/a/X0SWwGDD7mwX5nh8hKDgE0GPt+sr+ehiGsxr/CL0BgzuNtFajT0 -AoAkKAoCFZVedioNmToUW/bLy1O8E00BiDeUJRtCvCLYjqOWXjrZMts+6PAQZe10 -4S+nfK8nNLspfZu2zwnI5dMK/IhlZXQK3HMcXM1AsRzUtoSMTFDPaI6oWa7CJ06C -ojXdFPQf/7J31Ycvqm59JCfnxssm5uX+Zwdj2EUN3TpZZTlYepKZcj2chF6IIbjV -9Cz82XBST3i4vTwri5WY9bPRaM8gFH5MXF/ni+X1NYEZN9cRCLdmvtNKzoNXADrD -gfgXy5I2XdGj2HUb4Ysn6npIQf1FGQatJ5lOwXBH3bWfgVMS5bGMSF0xQxfjjMZ6 -Y5ZLKTBOhE5iGV48zpeQpX8B653g+IuJ3SWYPZK2fu/Z8VFRfS0myGlZYeCsargq -NhEEelC9MoS+L9xy1dcdFkfkR2YgP/SWxa+OAXqlD3pk9Q0Yh9muiNX6hME6wGko -LfINaFGq46V3xqSQDqE3izEjR8EJCOtu93ib14L8hCCZSRm2Ekax+0VVFqmjZayc -Bw/qa9wfLgZy7IaIEuQt218FL+TwA9MmM+eAws1CoRc0CwIDAQABo0IwQDAPBgNV -HRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUcRVnyMjJvXVd -ctA4GGqd83EkVAswDQYJKoZIhvcNAQELBQADggIBAHW7bVRLqhBYRjTyYtcWNl0I -XtVsyIe9tC5G8jH4fOpCtZMWVdyhDBKg2mF+D1hYc2Ryx+hFjtyp8iY/xnmMsVMI -M4GwVhO+5lFc2JsKT0ucVlMC6U/2DWDqTUJV6HwbISHTGzrMd/K4kPFox/la/vot -9L/J9UUbzjgQKjeKeaO04wlshYaT/4mWJ3iBj2fjRnRUjtkNaeJK9E10A/+yd+2V -Z5fkscWrv2oj6NSU4kQoYsRL4vDY4ilrGnB+JGGTe08DMiUNRSQrlrRGar9KC/ea -j8GsGsVn82800vpzY4zvFrCopEYq+OsS7HK07/grfoxSwIuEVPkvPuNVqNxmsdnh -X9izjFk0WaSrT2y7HxjbdavYy5LNlDhhDgcGH0tGEPEVvo2FXDtKK4F5D7Rpn0lQ -l033DlZdwJVqwjbDG2jJ9SrcR5q+ss7FJej6A7na+RZukYT1HCjI/CbM1xyQVqdf -bzoEvM14iQuODy+jqk+iGxI9FghAD/FGTNeqewjBCvVtJ94Cj8rDtSvK6evIIVM4 -pcw72Hc3MKJP2W/R8kCtQXoXxdZKNYm3QdV8hn9VTYNKpXMgwDqvkPGaJI7ZjnHK -e7iG2rKPmT4dEw0SEe7Uq/DpFXYC5ODfqiAeW2GFZECpkJcNrVPSWh2HagCXZWK0 -vm9qp/UsQu0yrbYhnr68 ------END CERTIFICATE----- - -# Issuer: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. Authority -# Subject: CN=Hellenic Academic and Research Institutions ECC RootCA 2015 O=Hellenic Academic and Research Institutions Cert. 
Authority -# Label: "Hellenic Academic and Research Institutions ECC RootCA 2015" -# Serial: 0 -# MD5 Fingerprint: 81:e5:b4:17:eb:c2:f5:e1:4b:0d:41:7b:49:92:fe:ef -# SHA1 Fingerprint: 9f:f1:71:8d:92:d5:9a:f3:7d:74:97:b4:bc:6f:84:68:0b:ba:b6:66 -# SHA256 Fingerprint: 44:b5:45:aa:8a:25:e6:5a:73:ca:15:dc:27:fc:36:d2:4c:1c:b9:95:3a:06:65:39:b1:15:82:dc:48:7b:48:33 ------BEGIN CERTIFICATE----- -MIICwzCCAkqgAwIBAgIBADAKBggqhkjOPQQDAjCBqjELMAkGA1UEBhMCR1IxDzAN -BgNVBAcTBkF0aGVuczFEMEIGA1UEChM7SGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDZXJ0LiBBdXRob3JpdHkxRDBCBgNVBAMTO0hl -bGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgRUNDIFJv -b3RDQSAyMDE1MB4XDTE1MDcwNzEwMzcxMloXDTQwMDYzMDEwMzcxMlowgaoxCzAJ -BgNVBAYTAkdSMQ8wDQYDVQQHEwZBdGhlbnMxRDBCBgNVBAoTO0hlbGxlbmljIEFj -YWRlbWljIGFuZCBSZXNlYXJjaCBJbnN0aXR1dGlvbnMgQ2VydC4gQXV0aG9yaXR5 -MUQwQgYDVQQDEztIZWxsZW5pYyBBY2FkZW1pYyBhbmQgUmVzZWFyY2ggSW5zdGl0 -dXRpb25zIEVDQyBSb290Q0EgMjAxNTB2MBAGByqGSM49AgEGBSuBBAAiA2IABJKg -QehLgoRc4vgxEZmGZE4JJS+dQS8KrjVPdJWyUWRrjWvmP3CV8AVER6ZyOFB2lQJa -jq4onvktTpnvLEhvTCUp6NFxW98dwXU3tNf6e3pCnGoKVlp8aQuqgAkkbH7BRqNC -MEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFLQi -C4KZJAEOnLvkDv2/+5cgk5kqMAoGCCqGSM49BAMCA2cAMGQCMGfOFmI4oqxiRaep -lSTAGiecMjvAwNW6qef4BENThe5SId6d9SWDPp5YSy/XZxMOIQIwBeF1Ad5o7Sof -TUwJCA3sS61kFyjndc5FZXIhF8siQQ6ME5g4mlRtm8rifOoCWCKR ------END CERTIFICATE----- - -# Issuer: CN=ISRG Root X1 O=Internet Security Research Group -# Subject: CN=ISRG Root X1 O=Internet Security Research Group -# Label: "ISRG Root X1" -# Serial: 172886928669790476064670243504169061120 -# MD5 Fingerprint: 0c:d2:f9:e0:da:17:73:e9:ed:86:4d:a5:e3:70:e7:4e -# SHA1 Fingerprint: ca:bd:2a:79:a1:07:6a:31:f2:1d:25:36:35:cb:03:9d:43:29:a5:e8 -# SHA256 Fingerprint: 96:bc:ec:06:26:49:76:f3:74:60:77:9a:cf:28:c5:a7:cf:e8:a3:c0:aa:e1:1a:8f:fc:ee:05:c0:bd:df:08:c6 ------BEGIN CERTIFICATE----- -MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw -TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh -cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 -WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu -ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY -MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc -h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ -0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U -A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW -T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH -B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC -B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv -KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn -OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn -jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw -qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI -rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV -HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq -hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL -ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ -3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK -NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 -ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur -TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC -jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 
-oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq -4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA -mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d -emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= ------END CERTIFICATE----- - -# Issuer: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Subject: O=FNMT-RCM OU=AC RAIZ FNMT-RCM -# Label: "AC RAIZ FNMT-RCM" -# Serial: 485876308206448804701554682760554759 -# MD5 Fingerprint: e2:09:04:b4:d3:bd:d1:a0:14:fd:1a:d2:47:c4:57:1d -# SHA1 Fingerprint: ec:50:35:07:b2:15:c4:95:62:19:e2:a8:9a:5b:42:99:2c:4c:2c:20 -# SHA256 Fingerprint: eb:c5:57:0c:29:01:8c:4d:67:b1:aa:12:7b:af:12:f7:03:b4:61:1e:bc:17:b7:da:b5:57:38:94:17:9b:93:fa ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIPXZONMGc2yAYdGsdUhGkHMA0GCSqGSIb3DQEBCwUAMDsx -CzAJBgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJ -WiBGTk1ULVJDTTAeFw0wODEwMjkxNTU5NTZaFw0zMDAxMDEwMDAwMDBaMDsxCzAJ -BgNVBAYTAkVTMREwDwYDVQQKDAhGTk1ULVJDTTEZMBcGA1UECwwQQUMgUkFJWiBG -Tk1ULVJDTTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBALpxgHpMhm5/ -yBNtwMZ9HACXjywMI7sQmkCpGreHiPibVmr75nuOi5KOpyVdWRHbNi63URcfqQgf -BBckWKo3Shjf5TnUV/3XwSyRAZHiItQDwFj8d0fsjz50Q7qsNI1NOHZnjrDIbzAz -WHFctPVrbtQBULgTfmxKo0nRIBnuvMApGGWn3v7v3QqQIecaZ5JCEJhfTzC8PhxF -tBDXaEAUwED653cXeuYLj2VbPNmaUtu1vZ5Gzz3rkQUCwJaydkxNEJY7kvqcfw+Z -374jNUUeAlz+taibmSXaXvMiwzn15Cou08YfxGyqxRxqAQVKL9LFwag0Jl1mpdIC -IfkYtwb1TplvqKtMUejPUBjFd8g5CSxJkjKZqLsXF3mwWsXmo8RZZUc1g16p6DUL -mbvkzSDGm0oGObVo/CK67lWMK07q87Hj/LaZmtVC+nFNCM+HHmpxffnTtOmlcYF7 -wk5HlqX2doWjKI/pgG6BU6VtX7hI+cL5NqYuSf+4lsKMB7ObiFj86xsc3i1w4peS -MKGJ47xVqCfWS+2QrYv6YyVZLag13cqXM7zlzced0ezvXg5KkAYmY6252TUtB7p2 -ZSysV4999AeU14ECll2jB0nVetBX+RvnU0Z1qrB5QstocQjpYL05ac70r8NWQMet -UqIJ5G+GR4of6ygnXYMgrwTJbFaai0b1AgMBAAGjgYMwgYAwDwYDVR0TAQH/BAUw -AwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFPd9xf3E6Jobd2Sn9R2gzL+H -YJptMD4GA1UdIAQ3MDUwMwYEVR0gADArMCkGCCsGAQUFBwIBFh1odHRwOi8vd3d3 -LmNlcnQuZm5tdC5lcy9kcGNzLzANBgkqhkiG9w0BAQsFAAOCAgEAB5BK3/MjTvDD -nFFlm5wioooMhfNzKWtN/gHiqQxjAb8EZ6WdmF/9ARP67Jpi6Yb+tmLSbkyU+8B1 -RXxlDPiyN8+sD8+Nb/kZ94/sHvJwnvDKuO+3/3Y3dlv2bojzr2IyIpMNOmqOFGYM -LVN0V2Ue1bLdI4E7pWYjJ2cJj+F3qkPNZVEI7VFY/uY5+ctHhKQV8Xa7pO6kO8Rf -77IzlhEYt8llvhjho6Tc+hj507wTmzl6NLrTQfv6MooqtyuGC2mDOL7Nii4LcK2N -JpLuHvUBKwrZ1pebbuCoGRw6IYsMHkCtA+fdZn71uSANA+iW+YJF1DngoABd15jm -fZ5nc8OaKveri6E6FO80vFIOiZiaBECEHX5FaZNXzuvO+FB8TxxuBEOb+dY7Ixjp -6o7RTUaN8Tvkasq6+yO3m/qZASlaWFot4/nUbQ4mrcFuNLwy+AwF+mWj2zs3gyLp -1txyM/1d8iC9djwj2ij3+RvrWWTV3F9yfiD8zYm1kGdNYno/Tq0dwzn+evQoFt9B -9kiABdcPUXmsEKvU7ANm5mqwujGSQkBqvjrTcuFqN1W8rB2Vt2lh8kORdOag0wok -RqEIr9baRRmW1FMdW4R58MD3R++Lj8UGrp1MYp3/RgT408m2ECVAdf4WqslKYIYv -uu8wd+RU4riEmViAqhOLUTpPSPaLtrM= ------END CERTIFICATE----- - -# Issuer: CN=Amazon Root CA 1 O=Amazon -# Subject: CN=Amazon Root CA 1 O=Amazon -# Label: "Amazon Root CA 1" -# Serial: 143266978916655856878034712317230054538369994 -# MD5 Fingerprint: 43:c6:bf:ae:ec:fe:ad:2f:18:c6:88:68:30:fc:c8:e6 -# SHA1 Fingerprint: 8d:a7:f9:65:ec:5e:fc:37:91:0f:1c:6e:59:fd:c1:cc:6a:6e:de:16 -# SHA256 Fingerprint: 8e:cd:e6:88:4f:3d:87:b1:12:5b:a3:1a:c3:fc:b1:3d:70:16:de:7f:57:cc:90:4f:e1:cb:97:c6:ae:98:19:6e ------BEGIN CERTIFICATE----- -MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF -ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6 -b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL -MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv -b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj 
-ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
-9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
-IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
-VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
-93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
-jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
-AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
-A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
-U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
-N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
-o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
-5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
-rqXRfboQnoZsG4q5WTP468SQvvG5
------END CERTIFICATE-----
-
-# Issuer: CN=Amazon Root CA 2 O=Amazon
-# Subject: CN=Amazon Root CA 2 O=Amazon
-# Label: "Amazon Root CA 2"
-# Serial: 143266982885963551818349160658925006970653239
-# MD5 Fingerprint: c8:e5:8d:ce:a8:42:e2:7a:c0:2a:5c:7c:9e:26:bf:66
-# SHA1 Fingerprint: 5a:8c:ef:45:d7:a6:98:59:76:7a:8c:8b:44:96:b5:78:cf:47:4b:1a
-# SHA256 Fingerprint: 1b:a5:b2:aa:8c:65:40:1a:82:96:01:18:f8:0b:ec:4f:62:30:4d:83:ce:c4:71:3a:19:c3:9c:01:1e:a4:6d:b4
------BEGIN CERTIFICATE-----
-MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
-ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
-b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
-MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
-b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
-gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
-W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
-1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
-8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
-2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
-z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
-8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
-mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
-7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
-+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
-0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
-Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
-UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
-LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
-+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
-k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
-7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
-btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
-urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
-fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
-n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
-76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
-9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
-4PsJYGw=
------END CERTIFICATE-----
-
-# Issuer: CN=Amazon Root CA 3 O=Amazon
-# Subject: CN=Amazon Root CA 3 O=Amazon
-# Label: "Amazon Root CA 3"
-# Serial: 143266986699090766294700635381230934788665930
-# MD5 Fingerprint: a0:d4:ef:0b:f7:b5:d8:49:95:2a:ec:f5:c4:fc:81:87
-# SHA1 Fingerprint: 0d:44:dd:8c:3c:8c:1a:1a:58:75:64:81:e9:0f:2e:2a:ff:b3:d2:6e
-# SHA256 Fingerprint: 18:ce:6c:fe:7b:f1:4e:60:b2:e3:47:b8:df:e8:68:cb:31:d0:2e:bb:3a:da:27:15:69:f5:03:43:b4:6d:b3:a4
------BEGIN CERTIFICATE-----
-MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
-MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
-Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
-A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
-Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
-ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
-QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
-ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
-BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
-YyRIHN8wfdVoOw==
------END CERTIFICATE-----
-
-# Issuer: CN=Amazon Root CA 4 O=Amazon
-# Subject: CN=Amazon Root CA 4 O=Amazon
-# Label: "Amazon Root CA 4"
-# Serial: 143266989758080763974105200630763877849284878
-# MD5 Fingerprint: 89:bc:27:d5:eb:17:8d:06:6a:69:d5:fd:89:47:b4:cd
-# SHA1 Fingerprint: f6:10:84:07:d6:f8:bb:67:98:0c:c2:e2:44:c2:eb:ae:1c:ef:63:be
-# SHA256 Fingerprint: e3:5d:28:41:9e:d0:20:25:cf:a6:90:38:cd:62:39:62:45:8d:a5:c6:95:fb:de:a3:c2:2b:0b:fb:25:89:70:92
------BEGIN CERTIFICATE-----
-MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
-MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
-Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
-A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
-Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
-9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
-M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
-/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
-MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
-CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
-1KyLa2tJElMzrdfkviT8tQp21KW8EA==
------END CERTIFICATE-----
-
-# Issuer: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
-# Subject: CN=TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1 O=Turkiye Bilimsel ve Teknolojik Arastirma Kurumu - TUBITAK OU=Kamu Sertifikasyon Merkezi - Kamu SM
-# Label: "TUBITAK Kamu SM SSL Kok Sertifikasi - Surum 1"
-# Serial: 1
-# MD5 Fingerprint: dc:00:81:dc:69:2f:3e:2f:b0:3b:f6:3d:5a:91:8e:49
-# SHA1 Fingerprint: 31:43:64:9b:ec:ce:27:ec:ed:3a:3f:0b:8f:0d:e4:e8:91:dd:ee:ca
-# SHA256 Fingerprint: 46:ed:c3:68:90:46:d5:3a:45:3f:b3:10:4a:b8:0d:ca:ec:65:8b:26:60:ea:16:29:dd:7e:86:79:90:64:87:16
------BEGIN CERTIFICATE-----
-MIIEYzCCA0ugAwIBAgIBATANBgkqhkiG9w0BAQsFADCB0jELMAkGA1UEBhMCVFIx
-GDAWBgNVBAcTD0dlYnplIC0gS29jYWVsaTFCMEAGA1UEChM5VHVya2l5ZSBCaWxp
-bXNlbCB2ZSBUZWtub2xvamlrIEFyYXN0aXJtYSBLdXJ1bXUgLSBUVUJJVEFLMS0w
-KwYDVQQLEyRLYW11IFNlcnRpZmlrYXN5b24gTWVya2V6aSAtIEthbXUgU00xNjA0
-BgNVBAMTLVRVQklUQUsgS2FtdSBTTSBTU0wgS29rIFNlcnRpZmlrYXNpIC0gU3Vy
-dW0gMTAeFw0xMzExMjUwODI1NTVaFw00MzEwMjUwODI1NTVaMIHSMQswCQYDVQQG
-EwJUUjEYMBYGA1UEBxMPR2ViemUgLSBLb2NhZWxpMUIwQAYDVQQKEzlUdXJraXll
-IEJpbGltc2VsIHZlIFRla25vbG9qaWsgQXJhc3Rpcm1hIEt1cnVtdSAtIFRVQklU
-QUsxLTArBgNVBAsTJEthbXUgU2VydGlmaWthc3lvbiBNZXJrZXppIC0gS2FtdSBT
-TTE2MDQGA1UEAxMtVFVCSVRBSyBLYW11IFNNIFNTTCBLb2sgU2VydGlmaWthc2kg
-LSBTdXJ1bSAxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAr3UwM6q7
-a9OZLBI3hNmNe5eA027n/5tQlT6QlVZC1xl8JoSNkvoBHToP4mQ4t4y86Ij5iySr
-LqP1N+RAjhgleYN1Hzv/bKjFxlb4tO2KRKOrbEz8HdDc72i9z+SqzvBV96I01INr
-N3wcwv61A+xXzry0tcXtAA9TNypN9E8Mg/uGz8v+jE69h/mniyFXnHrfA2eJLJ2X -YacQuFWQfw4tJzh03+f92k4S400VIgLI4OD8D62K18lUUMw7D8oWgITQUVbDjlZ/ -iSIzL+aFCr2lqBs23tPcLG07xxO9WSMs5uWk99gL7eqQQESolbuT1dCANLZGeA4f -AJNG4e7p+exPFwIDAQABo0IwQDAdBgNVHQ4EFgQUZT/HiobGPN08VFw1+DrtUgxH -V8gwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBACo/4fEyjq7hmFxLXs9rHmoJ0iKpEsdeV31zVmSAhHqT5Am5EM2fKifh -AHe+SMg1qIGf5LgsyX8OsNJLN13qudULXjS99HMpw+0mFZx+CFOKWI3QSyjfwbPf -IPP54+M638yclNhOT8NrF7f3cuitZjO1JVOr4PhMqZ398g26rrnZqsZr+ZO7rqu4 -lzwDGrpDxpa5RXI4s6ehlj2Re37AIVNMh+3yC1SVUZPVIqUNivGTDj5UDrDYyU7c -8jEyVupk+eq1nRZmQnLzf9OxMUP8pI4X8W0jq5Rm+K37DwhuJi1/FwcJsoz7UMCf -lo3Ptv0AnVoUmr8CRPXBwp8iXqIPoeM= ------END CERTIFICATE----- - -# Issuer: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Subject: CN=GDCA TrustAUTH R5 ROOT O=GUANG DONG CERTIFICATE AUTHORITY CO.,LTD. -# Label: "GDCA TrustAUTH R5 ROOT" -# Serial: 9009899650740120186 -# MD5 Fingerprint: 63:cc:d9:3d:34:35:5c:6f:53:a3:e2:08:70:48:1f:b4 -# SHA1 Fingerprint: 0f:36:38:5b:81:1a:25:c3:9b:31:4e:83:ca:e9:34:66:70:cc:74:b4 -# SHA256 Fingerprint: bf:ff:8f:d0:44:33:48:7d:6a:8a:a6:0c:1a:29:76:7a:9f:c2:bb:b0:5e:42:0f:71:3a:13:b9:92:89:1d:38:93 ------BEGIN CERTIFICATE----- -MIIFiDCCA3CgAwIBAgIIfQmX/vBH6nowDQYJKoZIhvcNAQELBQAwYjELMAkGA1UE -BhMCQ04xMjAwBgNVBAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZ -IENPLixMVEQuMR8wHQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMB4XDTE0 -MTEyNjA1MTMxNVoXDTQwMTIzMTE1NTk1OVowYjELMAkGA1UEBhMCQ04xMjAwBgNV -BAoMKUdVQU5HIERPTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZIENPLixMVEQuMR8w -HQYDVQQDDBZHRENBIFRydXN0QVVUSCBSNSBST09UMIICIjANBgkqhkiG9w0BAQEF -AAOCAg8AMIICCgKCAgEA2aMW8Mh0dHeb7zMNOwZ+Vfy1YI92hhJCfVZmPoiC7XJj -Dp6L3TQsAlFRwxn9WVSEyfFrs0yw6ehGXTjGoqcuEVe6ghWinI9tsJlKCvLriXBj -TnnEt1u9ol2x8kECK62pOqPseQrsXzrj/e+APK00mxqriCZ7VqKChh/rNYmDf1+u -KU49tm7srsHwJ5uu4/Ts765/94Y9cnrrpftZTqfrlYwiOXnhLQiPzLyRuEH3FMEj -qcOtmkVEs7LXLM3GKeJQEK5cy4KOFxg2fZfmiJqwTTQJ9Cy5WmYqsBebnh52nUpm -MUHfP/vFBu8btn4aRjb3ZGM74zkYI+dndRTVdVeSN72+ahsmUPI2JgaQxXABZG12 -ZuGR224HwGGALrIuL4xwp9E7PLOR5G62xDtw8mySlwnNR30YwPO7ng/Wi64HtloP -zgsMR6flPri9fcebNaBhlzpBdRfMK5Z3KpIhHtmVdiBnaM8Nvd/WHwlqmuLMc3Gk -L30SgLdTMEZeS1SZD2fJpcjyIMGC7J0R38IC+xo70e0gmu9lZJIQDSri3nDxGGeC -jGHeuLzRL5z7D9Ar7Rt2ueQ5Vfj4oR24qoAATILnsn8JuLwwoC8N9VKejveSswoA -HQBUlwbgsQfZxw9cZX08bVlX5O2ljelAU58VS6Bx9hoh49pwBiFYFIeFd3mqgnkC -AwEAAaNCMEAwHQYDVR0OBBYEFOLJQJ9NzuiaoXzPDj9lxSmIahlRMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQDRSVfg -p8xoWLoBDysZzY2wYUWsEe1jUGn4H3++Fo/9nesLqjJHdtJnJO29fDMylyrHBYZm -DRd9FBUb1Ov9H5r2XpdptxolpAqzkT9fNqyL7FeoPueBihhXOYV0GkLH6VsTX4/5 -COmSdI31R9KrO9b7eGZONn356ZLpBN79SWP8bfsUcZNnL0dKt7n/HipzcEYwv1ry -L3ml4Y0M2fmyYzeMN2WFcGpcWwlyua1jPLHd+PwyvzeG5LuOmCd+uh8W4XAR8gPf -JWIyJyYYMoSf/wA6E7qaTfRPuBRwIrHKK5DOKcFw9C+df/KQHtZa37dG/OaG+svg -IHZ6uqbL9XzeYqWxi+7egmaKTjowHz+Ay60nugxe19CxVsp3cbK1daFQqUBDF8Io -2c9Si1vIY9RCPqAzekYu9wogRlR+ak8x8YF+QnQ4ZXMn7sZ8uI7XpTrXmKGcjBBV -09tL7ECQ8s1uV9JiDnxXk7Gnbc2dg7sq5+W2O3FYrf3RRbxake5TFW/TRQl1brqQ -XR4EzzffHqhmsYzmIGrv/EhOdJhCrylvLmrH+33RZjEizIYAfmaDDEL0vTSSwxrq -T8p+ck0LcIymSLumoRT2+1hEmRSuqguTaaApJUqlyyvdimYHFngVV3Eb7PVHhPOe -MTd61X8kreS8/f3MboPoDKi3QWwH3b08hpcv0g== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority RSA O=SSL Corporation -# Label: "SSL.com Root Certification Authority RSA" -# Serial: 8875640296558310041 -# MD5 Fingerprint: 86:69:12:c0:70:f1:ec:ac:ac:c2:d5:bc:a5:5b:a1:29 -# SHA1 Fingerprint: 
b7:ab:33:08:d1:ea:44:77:ba:14:80:12:5a:6f:bd:a9:36:49:0c:bb -# SHA256 Fingerprint: 85:66:6a:56:2e:e0:be:5c:e9:25:c1:d8:89:0a:6f:76:a8:7e:c1:6d:4d:7d:5f:29:ea:74:19:cf:20:12:3b:69 ------BEGIN CERTIFICATE----- -MIIF3TCCA8WgAwIBAgIIeyyb0xaAMpkwDQYJKoZIhvcNAQELBQAwfDELMAkGA1UE -BhMCVVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQK -DA9TU0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBSU0EwHhcNMTYwMjEyMTczOTM5WhcNNDEwMjEyMTcz -OTM5WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNv -bSBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFJTQTCCAiIwDQYJKoZIhvcN -AQEBBQADggIPADCCAgoCggIBAPkP3aMrfcvQKv7sZ4Wm5y4bunfh4/WvpOz6Sl2R -xFdHaxh3a3by/ZPkPQ/CFp4LZsNWlJ4Xg4XOVu/yFv0AYvUiCVToZRdOQbngT0aX -qhvIuG5iXmmxX9sqAn78bMrzQdjt0Oj8P2FI7bADFB0QDksZ4LtO7IZl/zbzXmcC -C52GVWH9ejjt/uIZALdvoVBidXQ8oPrIJZK0bnoix/geoeOy3ZExqysdBP+lSgQ3 -6YWkMyv94tZVNHwZpEpox7Ko07fKoZOI68GXvIz5HdkihCR0xwQ9aqkpk8zruFvh -/l8lqjRYyMEjVJ0bmBHDOJx+PYZspQ9AhnwC9FwCTyjLrnGfDzrIM/4RJTXq/LrF -YD3ZfBjVsqnTdXgDciLKOsMf7yzlLqn6niy2UUb9rwPW6mBo6oUWNmuF6R7As93E -JNyAKoFBbZQ+yODJgUEAnl6/f8UImKIYLEJAs/lvOCdLToD0PYFH4Ih86hzOtXVc -US4cK38acijnALXRdMbX5J+tB5O2UzU1/Dfkw/ZdFr4hc96SCvigY2q8lpJqPvi8 -ZVWb3vUNiSYE/CUapiVpy8JtynziWV+XrOvvLsi81xtZPCvM8hnIk2snYxnP/Okm -+Mpxm3+T/jRnhE6Z6/yzeAkzcLpmpnbtG3PrGqUNxCITIJRWCk4sbE6x/c+cCbqi -M+2HAgMBAAGjYzBhMB0GA1UdDgQWBBTdBAkHovV6fVJTEpKV7jiAJQ2mWTAPBgNV -HRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFN0ECQei9Xp9UlMSkpXuOIAlDaZZMA4G -A1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAIBgRlCn7Jp0cHh5wYfGV -cpNxJK1ok1iOMq8bs3AD/CUrdIWQPXhq9LmLpZc7tRiRux6n+UBbkflVma8eEdBc -Hadm47GUBwwyOabqG7B52B2ccETjit3E+ZUfijhDPwGFpUenPUayvOUiaPd7nNgs -PgohyC0zrL/FgZkxdMF1ccW+sfAjRfSda/wZY52jvATGGAslu1OJD7OAUN5F7kR/ -q5R4ZJjT9ijdh9hwZXT7DrkT66cPYakylszeu+1jTBi7qUD3oFRuIIhxdRjqerQ0 -cuAjJ3dctpDqhiVAq+8zD8ufgr6iIPv2tS0a5sKFsXQP+8hlAqRSAUfdSSLBv9jr -a6x+3uxjMxW3IwiPxg+NQVrdjsW5j+VFP3jbutIbQLH+cU0/4IGiul607BXgk90I -H37hVZkLId6Tngr75qNJvTYw/ud3sqB1l7UtgYgXZSD32pAAn8lSzDLKNXz1PQ/Y -K9f1JmzJBjSWFupwWRoyeXkLtoh/D1JIPb9s2KJELtFOt3JY04kTlf5Eq/jXixtu -nLwsoFvVagCvXzfh1foQC5ichucmj87w7G6KVwuA406ywKBjYZC6VWg3dGq2ktuf -oYYitmUnDuy2n0Jg5GfCtdpBC8TTi2EbvPofkSvXRAdeuims2cXp71NIWuuA8ShY -Ic2wBlX7Jz9TkHCpBB5XJ7k= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com Root Certification Authority ECC" -# Serial: 8495723813297216424 -# MD5 Fingerprint: 2e:da:e4:39:7f:9c:8f:37:d1:70:9f:26:17:51:3a:8e -# SHA1 Fingerprint: c3:19:7c:39:24:e6:54:af:1b:c4:ab:20:95:7a:e2:c3:0e:13:02:6a -# SHA256 Fingerprint: 34:17:bb:06:cc:60:07:da:1b:96:1c:92:0b:8a:b4:ce:3f:ad:82:0e:4a:a3:0b:9a:cb:c4:a7:4e:bd:ce:bc:65 ------BEGIN CERTIFICATE----- -MIICjTCCAhSgAwIBAgIIdebfy8FoW6gwCgYIKoZIzj0EAwIwfDELMAkGA1UEBhMC -VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xMTAvBgNVBAMMKFNTTC5jb20gUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNDAzWhcNNDEwMjEyMTgxNDAz -WjB8MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hvdXN0 -b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjExMC8GA1UEAwwoU1NMLmNvbSBS -b290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABEVuqVDEpiM2nl8ojRfLliJkP9x6jh3MCLOicSS6jkm5BBtHllirLZXI -7Z4INcgn64mMU1jrYor+8FsPazFSY0E7ic3s7LaNGdM0B9y7xgZ/wkWV7Mt/qCPg -CemB+vNH06NjMGEwHQYDVR0OBBYEFILRhXMw5zUE044CkvvlpNHEIejNMA8GA1Ud 
-EwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUgtGFczDnNQTTjgKS++Wk0cQh6M0wDgYD -VR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2cAMGQCMG/n61kRpGDPYbCWe+0F+S8T -kdzt5fxQaxFGRrMcIQBiu77D5+jNB5n5DQtdcj7EqgIwH7y6C+IwJPt8bYBVCpk+ -gA0z5Wajs6O7pdWLjwkspl1+4vAHCGht0nxpbl/f5Wpl ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority RSA R2 O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority RSA R2" -# Serial: 6248227494352943350 -# MD5 Fingerprint: e1:1e:31:58:1a:ae:54:53:02:f6:17:6a:11:7b:4d:95 -# SHA1 Fingerprint: 74:3a:f0:52:9b:d0:32:a0:f4:4a:83:cd:d4:ba:a9:7b:7c:2e:c4:9a -# SHA256 Fingerprint: 2e:7b:f1:6c:c2:24:85:a7:bb:e2:aa:86:96:75:07:61:b0:ae:39:be:3b:2f:e9:d0:cc:6d:4e:f7:34:91:42:5c ------BEGIN CERTIFICATE----- -MIIF6zCCA9OgAwIBAgIIVrYpzTS8ePYwDQYJKoZIhvcNAQELBQAwgYIxCzAJBgNV -BAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4GA1UEBwwHSG91c3RvbjEYMBYGA1UE -CgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQDDC5TU0wuY29tIEVWIFJvb3QgQ2Vy -dGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIyMB4XDTE3MDUzMTE4MTQzN1oXDTQy -MDUzMDE4MTQzN1owgYIxCzAJBgNVBAYTAlVTMQ4wDAYDVQQIDAVUZXhhczEQMA4G -A1UEBwwHSG91c3RvbjEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMTcwNQYDVQQD -DC5TU0wuY29tIEVWIFJvb3QgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgUlNBIFIy -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAjzZlQOHWTcDXtOlG2mvq -M0fNTPl9fb69LT3w23jhhqXZuglXaO1XPqDQCEGD5yhBJB/jchXQARr7XnAjssuf -OePPxU7Gkm0mxnu7s9onnQqG6YE3Bf7wcXHswxzpY6IXFJ3vG2fThVUCAtZJycxa -4bH3bzKfydQ7iEGonL3Lq9ttewkfokxykNorCPzPPFTOZw+oz12WGQvE43LrrdF9 -HSfvkusQv1vrO6/PgN3B0pYEW3p+pKk8OHakYo6gOV7qd89dAFmPZiw+B6KjBSYR -aZfqhbcPlgtLyEDhULouisv3D5oi53+aNxPN8k0TayHRwMwi8qFG9kRpnMphNQcA -b9ZhCBHqurj26bNg5U257J8UZslXWNvNh2n4ioYSA0e/ZhN2rHd9NCSFg83XqpyQ -Gp8hLH94t2S42Oim9HizVcuE0jLEeK6jj2HdzghTreyI/BXkmg3mnxp3zkyPuBQV -PWKchjgGAGYS5Fl2WlPAApiiECtoRHuOec4zSnaqW4EWG7WK2NAAe15itAnWhmMO -pgWVSbooi4iTsjQc2KRVbrcc0N6ZVTsj9CLg+SlmJuwgUHfbSguPvuUCYHBBXtSu -UDkiFCbLsjtzdFVHB3mBOagwE0TlBIqulhMlQg+5U8Sb/M3kHN48+qvWBkofZ6aY -MBzdLNvcGJVXZsb/XItW9XcCAwEAAaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNV -HSMEGDAWgBT5YLvU49U09rj1BoAlp3PbRmmonjAdBgNVHQ4EFgQU+WC71OPVNPa4 -9QaAJadz20ZpqJ4wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEBCwUAA4ICAQBW -s47LCp1Jjr+kxJG7ZhcFUZh1++VQLHqe8RT6q9OKPv+RKY9ji9i0qVQBDb6Thi/5 -Sm3HXvVX+cpVHBK+Rw82xd9qt9t1wkclf7nxY/hoLVUE0fKNsKTPvDxeH3jnpaAg -cLAExbf3cqfeIg29MyVGjGSSJuM+LmOW2puMPfgYCdcDzH2GguDKBAdRUNf/ktUM -79qGn5nX67evaOI5JpS6aLe/g9Pqemc9YmeuJeVy6OLk7K4S9ksrPJ/psEDzOFSz -/bdoyNrGj1E8svuR3Bznm53htw1yj+KkxKl4+esUrMZDBcJlOSgYAsOCsp0FvmXt -ll9ldDz7CTUue5wT/RsPXcdtgTpWD8w74a8CLyKsRspGPKAcTNZEtF4uXBVmCeEm -Kf7GUmG6sXP/wwyc5WxqlD8UykAWlYTzWamsX0xhk23RO8yilQwipmdnRC652dKK -QbNmC1r7fSOl8hqw/96bg5Qu0T/fkreRrwU7ZcegbLHNYhLDkBvjJc40vG93drEQ -w/cFGsDWr3RiSBd3kmmQYRzelYB0VI8YHMPzA9C/pEN1hlMYegouCRw2n5H9gooi -S9EOUCXdywMMF8mDAAhONU2Ki+3wApRmLER/y5UnlhetCTCstnEXbosX9hwJ1C07 -mKVx01QT2WDz9UtmT/rx7iASjbSsV7FFY6GsdqnC+w== ------END CERTIFICATE----- - -# Issuer: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Subject: CN=SSL.com EV Root Certification Authority ECC O=SSL Corporation -# Label: "SSL.com EV Root Certification Authority ECC" -# Serial: 3182246526754555285 -# MD5 Fingerprint: 59:53:22:65:83:42:01:54:c0:ce:42:b9:5a:7c:f2:90 -# SHA1 Fingerprint: 4c:dd:51:a3:d1:f5:20:32:14:b0:c6:c5:32:23:03:91:c7:46:42:6d -# SHA256 Fingerprint: 22:a2:c1:f7:bd:ed:70:4c:c1:e7:01:b5:f4:08:c3:10:88:0f:e9:56:b5:de:2a:4a:44:f9:9c:87:3a:25:a7:c8 ------BEGIN CERTIFICATE----- -MIIClDCCAhqgAwIBAgIILCmcWxbtBZUwCgYIKoZIzj0EAwIwfzELMAkGA1UEBhMC 
-VVMxDjAMBgNVBAgMBVRleGFzMRAwDgYDVQQHDAdIb3VzdG9uMRgwFgYDVQQKDA9T -U0wgQ29ycG9yYXRpb24xNDAyBgNVBAMMK1NTTC5jb20gRVYgUm9vdCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eSBFQ0MwHhcNMTYwMjEyMTgxNTIzWhcNNDEwMjEyMTgx -NTIzWjB/MQswCQYDVQQGEwJVUzEOMAwGA1UECAwFVGV4YXMxEDAOBgNVBAcMB0hv -dXN0b24xGDAWBgNVBAoMD1NTTCBDb3Jwb3JhdGlvbjE0MDIGA1UEAwwrU1NMLmNv -bSBFViBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5IEVDQzB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABKoSR5CYG/vvw0AHgyBO8TCCogbR8pKGYfL2IWjKAMTH6kMA -VIbc/R/fALhBYlzccBYy3h+Z1MzFB8gIH2EWB1E9fVwHU+M1OIzfzZ/ZLg1Kthku -WnBaBu2+8KGwytAJKaNjMGEwHQYDVR0OBBYEFFvKXuXe0oGqzagtZFG22XKbl+ZP -MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUW8pe5d7SgarNqC1kUbbZcpuX -5k8wDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMCA2gAMGUCMQCK5kCJN+vp1RPZ -ytRrJPOwPYdGWBrssd9v+1a6cGvHOMzosYxPD/fxZ3YOg9AeUY8CMD32IygmTMZg -h5Mmm7I1HrrW9zzRHM76JTymGoEVW/MSD2zuZYrJh6j5B+BimoxcSg== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R6 -# Label: "GlobalSign Root CA - R6" -# Serial: 1417766617973444989252670301619537 -# MD5 Fingerprint: 4f:dd:07:e4:d4:22:64:39:1e:0c:37:42:ea:d1:c6:ae -# SHA1 Fingerprint: 80:94:64:0e:b5:a7:a1:ca:11:9c:1f:dd:d5:9f:81:02:63:a7:fb:d1 -# SHA256 Fingerprint: 2c:ab:ea:fe:37:d0:6c:a2:2a:ba:73:91:c0:03:3d:25:98:29:52:c4:53:64:73:49:76:3a:3a:b5:ad:6c:cf:69 ------BEGIN CERTIFICATE----- -MIIFgzCCA2ugAwIBAgIORea7A4Mzw4VlSOb/RVEwDQYJKoZIhvcNAQEMBQAwTDEg -MB4GA1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjYxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTQxMjEwMDAwMDAwWhcNMzQx -MjEwMDAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSNjET -MBEGA1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAJUH6HPKZvnsFMp7PPcNCPG0RQssgrRI -xutbPK6DuEGSMxSkb3/pKszGsIhrxbaJ0cay/xTOURQh7ErdG1rG1ofuTToVBu1k -ZguSgMpE3nOUTvOniX9PeGMIyBJQbUJmL025eShNUhqKGoC3GYEOfsSKvGRMIRxD -aNc9PIrFsmbVkJq3MQbFvuJtMgamHvm566qjuL++gmNQ0PAYid/kD3n16qIfKtJw -LnvnvJO7bVPiSHyMEAc4/2ayd2F+4OqMPKq0pPbzlUoSB239jLKJz9CgYXfIWHSw -1CM69106yqLbnQneXUQtkPGBzVeS+n68UARjNN9rkxi+azayOeSsJDa38O+2HBNX -k7besvjihbdzorg1qkXy4J02oW9UivFyVm4uiMVRQkQVlO6jxTiWm05OWgtH8wY2 -SXcwvHE35absIQh1/OZhFj931dmRl4QKbNQCTXTAFO39OfuD8l4UoQSwC+n+7o/h -bguyCLNhZglqsQY6ZZZZwPA1/cnaKI0aEYdwgQqomnUdnjqGBQCe24DWJfncBZ4n -WUx2OVvq+aWh2IMP0f/fMBH5hc8zSPXKbWQULHpYT9NLCEnFlWQaYw55PfWzjMpY -rZxCRXluDocZXFSxZba/jJvcE+kNb7gu3GduyYsRtYQUigAZcIN5kZeR1Bonvzce -MgfYFGM8KEyvAgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTAD -AQH/MB0GA1UdDgQWBBSubAWjkxPioufi1xzWx/B/yGdToDAfBgNVHSMEGDAWgBSu -bAWjkxPioufi1xzWx/B/yGdToDANBgkqhkiG9w0BAQwFAAOCAgEAgyXt6NH9lVLN -nsAEoJFp5lzQhN7craJP6Ed41mWYqVuoPId8AorRbrcWc+ZfwFSY1XS+wc3iEZGt -Ixg93eFyRJa0lV7Ae46ZeBZDE1ZXs6KzO7V33EByrKPrmzU+sQghoefEQzd5Mr61 -55wsTLxDKZmOMNOsIeDjHfrYBzN2VAAiKrlNIC5waNrlU/yDXNOd8v9EDERm8tLj -vUYAGm0CuiVdjaExUd1URhxN25mW7xocBFymFe944Hn+Xds+qkxV/ZoVqW/hpvvf -cDDpw+5CRu3CkwWJ+n1jez/QcYF8AOiYrg54NMMl+68KnyBr3TsTjxKM4kEaSHpz -oHdpx7Zcf4LIHv5YGygrqGytXm3ABdJ7t+uA/iU3/gKbaKxCXcPu9czc8FB10jZp -nOZ7BN9uBmm23goJSFmH63sUYHpkqmlD75HHTOwY3WzvUy2MmeFe8nI+z1TIvWfs -pA9MRf/TuTAjB0yPEL+GltmZWrSZVxykzLsViVO6LAUP5MSeGbEYNNVMnbrt9x+v -JJUEeKgDu+6B5dpffItKoZB0JaezPkvILFa9x8jvOOJckvB595yEunQtYQEgfn7R -8k8HWV+LLUNS60YMlOH1Zkd5d9VUWx+tJDfLRVpOoERIyNiwmcUVhAn21klJwGW4 -5hpxbqCo8YLoRT5s1gLXCmeDBVrJpBA= ------END CERTIFICATE----- - -# Issuer: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Subject: CN=OISTE WISeKey Global Root GC CA O=WISeKey OU=OISTE Foundation Endorsed -# Label: "OISTE 
WISeKey Global Root GC CA" -# Serial: 44084345621038548146064804565436152554 -# MD5 Fingerprint: a9:d6:b9:2d:2f:93:64:f8:a5:69:ca:91:e9:68:07:23 -# SHA1 Fingerprint: e0:11:84:5e:34:de:be:88:81:b9:9c:f6:16:26:d1:96:1f:c3:b9:31 -# SHA256 Fingerprint: 85:60:f9:1c:36:24:da:ba:95:70:b5:fe:a0:db:e3:6f:f1:1a:83:23:be:94:86:85:4f:b3:f3:4a:55:71:19:8d ------BEGIN CERTIFICATE----- -MIICaTCCAe+gAwIBAgIQISpWDK7aDKtARb8roi066jAKBggqhkjOPQQDAzBtMQsw -CQYDVQQGEwJDSDEQMA4GA1UEChMHV0lTZUtleTEiMCAGA1UECxMZT0lTVEUgRm91 -bmRhdGlvbiBFbmRvcnNlZDEoMCYGA1UEAxMfT0lTVEUgV0lTZUtleSBHbG9iYWwg -Um9vdCBHQyBDQTAeFw0xNzA1MDkwOTQ4MzRaFw00MjA1MDkwOTU4MzNaMG0xCzAJ -BgNVBAYTAkNIMRAwDgYDVQQKEwdXSVNlS2V5MSIwIAYDVQQLExlPSVNURSBGb3Vu -ZGF0aW9uIEVuZG9yc2VkMSgwJgYDVQQDEx9PSVNURSBXSVNlS2V5IEdsb2JhbCBS -b290IEdDIENBMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAETOlQwMYPchi82PG6s4ni -eUqjFqdrVCTbUf/q9Akkwwsin8tqJ4KBDdLArzHkdIJuyiXZjHWd8dvQmqJLIX4W -p2OQ0jnUsYd4XxiWD1AbNTcPasbc2RNNpI6QN+a9WzGRo1QwUjAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUSIcUrOPDnpBgOtfKie7T -rYy0UGYwEAYJKwYBBAGCNxUBBAMCAQAwCgYIKoZIzj0EAwMDaAAwZQIwJsdpW9zV -57LnyAyMjMPdeYwbY9XJUpROTYJKcx6ygISpJcBMWm1JKWB4E+J+SOtkAjEA2zQg -Mgj/mkkCtojeFK9dbJlxjRo/i9fgojaGHAeCOnZT/cKi7e97sIBPWA9LUzm9 ------END CERTIFICATE----- - -# Issuer: CN=UCA Global G2 Root O=UniTrust -# Subject: CN=UCA Global G2 Root O=UniTrust -# Label: "UCA Global G2 Root" -# Serial: 124779693093741543919145257850076631279 -# MD5 Fingerprint: 80:fe:f0:c4:4a:f0:5c:62:32:9f:1c:ba:78:a9:50:f8 -# SHA1 Fingerprint: 28:f9:78:16:19:7a:ff:18:25:18:aa:44:fe:c1:a0:ce:5c:b6:4c:8a -# SHA256 Fingerprint: 9b:ea:11:c9:76:fe:01:47:64:c1:be:56:a6:f9:14:b5:a5:60:31:7a:bd:99:88:39:33:82:e5:16:1a:a0:49:3c ------BEGIN CERTIFICATE----- -MIIFRjCCAy6gAwIBAgIQXd+x2lqj7V2+WmUgZQOQ7zANBgkqhkiG9w0BAQsFADA9 -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxGzAZBgNVBAMMElVDQSBH -bG9iYWwgRzIgUm9vdDAeFw0xNjAzMTEwMDAwMDBaFw00MDEyMzEwMDAwMDBaMD0x -CzAJBgNVBAYTAkNOMREwDwYDVQQKDAhVbmlUcnVzdDEbMBkGA1UEAwwSVUNBIEds -b2JhbCBHMiBSb290MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxeYr -b3zvJgUno4Ek2m/LAfmZmqkywiKHYUGRO8vDaBsGxUypK8FnFyIdK+35KYmToni9 -kmugow2ifsqTs6bRjDXVdfkX9s9FxeV67HeToI8jrg4aA3++1NDtLnurRiNb/yzm -VHqUwCoV8MmNsHo7JOHXaOIxPAYzRrZUEaalLyJUKlgNAQLx+hVRZ2zA+te2G3/R -VogvGjqNO7uCEeBHANBSh6v7hn4PJGtAnTRnvI3HLYZveT6OqTwXS3+wmeOwcWDc -C/Vkw85DvG1xudLeJ1uK6NjGruFZfc8oLTW4lVYa8bJYS7cSN8h8s+1LgOGN+jIj -tm+3SJUIsUROhYw6AlQgL9+/V087OpAh18EmNVQg7Mc/R+zvWr9LesGtOxdQXGLY -D0tK3Cv6brxzks3sx1DoQZbXqX5t2Okdj4q1uViSukqSKwxW/YDrCPBeKW4bHAyv -j5OJrdu9o54hyokZ7N+1wxrrFv54NkzWbtA+FxyQF2smuvt6L78RHBgOLXMDj6Dl -NaBa4kx1HXHhOThTeEDMg5PXCp6dW4+K5OXgSORIskfNTip1KnvyIvbJvgmRlld6 -iIis7nCs+dwp4wwcOxJORNanTrAmyPPZGpeRaOrvjUYG0lZFWJo8DA+DuAUlwznP -O6Q0ibd5Ei9Hxeepl2n8pndntd978XplFeRhVmUCAwEAAaNCMEAwDgYDVR0PAQH/ -BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFIHEjMz15DD/pQwIX4wV -ZyF0Ad/fMA0GCSqGSIb3DQEBCwUAA4ICAQATZSL1jiutROTL/7lo5sOASD0Ee/oj -L3rtNtqyzm325p7lX1iPyzcyochltq44PTUbPrw7tgTQvPlJ9Zv3hcU2tsu8+Mg5 -1eRfB70VVJd0ysrtT7q6ZHafgbiERUlMjW+i67HM0cOU2kTC5uLqGOiiHycFutfl -1qnN3e92mI0ADs0b+gO3joBYDic/UvuUospeZcnWhNq5NXHzJsBPd+aBJ9J3O5oU -b3n09tDh05S60FdRvScFDcH9yBIw7m+NESsIndTUv4BFFJqIRNow6rSn4+7vW4LV -PtateJLbXDzz2K36uGt/xDYotgIVilQsnLAXc47QN6MUPJiVAAwpBVueSUmxX8fj -y88nZY41F7dXyDDZQVu5FLbowg+UMaeUmMxq67XhJ/UQqAHojhJi6IjMtX9Gl8Cb -EGY4GjZGXyJoPd/JxhMnq1MGrKI8hgZlb7F+sSlEmqO6SWkoaY/X5V+tBIZkbxqg -DMUIYs6Ao9Dz7GjevjPHF1t/gMRMTLGmhIrDO7gJzRSBuhjjVFc2/tsvfEehOjPI -+Vg7RE+xygKJBJYoaMVLuCaJu9YzL1DV/pqJuhgyklTGW+Cd+V7lDSKb9triyCGy 
-YiGqhkCyLmTTX8jjfhFnRR8F/uOi77Oos/N9j/gMHyIfLXC0uAE0djAA5SN4p1bX -UB+K+wb1whnw0A== ------END CERTIFICATE----- - -# Issuer: CN=UCA Extended Validation Root O=UniTrust -# Subject: CN=UCA Extended Validation Root O=UniTrust -# Label: "UCA Extended Validation Root" -# Serial: 106100277556486529736699587978573607008 -# MD5 Fingerprint: a1:f3:5f:43:c6:34:9b:da:bf:8c:7e:05:53:ad:96:e2 -# SHA1 Fingerprint: a3:a1:b0:6f:24:61:23:4a:e3:36:a5:c2:37:fc:a6:ff:dd:f0:d7:3a -# SHA256 Fingerprint: d4:3a:f9:b3:54:73:75:5c:96:84:fc:06:d7:d8:cb:70:ee:5c:28:e7:73:fb:29:4e:b4:1e:e7:17:22:92:4d:24 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgIQT9Irj/VkyDOeTzRYZiNwYDANBgkqhkiG9w0BAQsFADBH -MQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNVBAMMHFVDQSBF -eHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwHhcNMTUwMzEzMDAwMDAwWhcNMzgxMjMx -MDAwMDAwWjBHMQswCQYDVQQGEwJDTjERMA8GA1UECgwIVW5pVHJ1c3QxJTAjBgNV -BAMMHFVDQSBFeHRlbmRlZCBWYWxpZGF0aW9uIFJvb3QwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCpCQcoEwKwmeBkqh5DFnpzsZGgdT6o+uM4AHrsiWog -D4vFsJszA1qGxliG1cGFu0/GnEBNyr7uaZa4rYEwmnySBesFK5pI0Lh2PpbIILvS -sPGP2KxFRv+qZ2C0d35qHzwaUnoEPQc8hQ2E0B92CvdqFN9y4zR8V05WAT558aop -O2z6+I9tTcg1367r3CTueUWnhbYFiN6IXSV8l2RnCdm/WhUFhvMJHuxYMjMR83dk -sHYf5BA1FxvyDrFspCqjc/wJHx4yGVMR59mzLC52LqGj3n5qiAno8geK+LLNEOfi -c0CTuwjRP+H8C5SzJe98ptfRr5//lpr1kXuYC3fUfugH0mK1lTnj8/FtDw5lhIpj -VMWAtuCeS31HJqcBCF3RiJ7XwzJE+oJKCmhUfzhTA8ykADNkUVkLo4KRel7sFsLz -KuZi2irbWWIQJUoqgQtHB0MGcIfS+pMRKXpITeuUx3BNr2fVUbGAIAEBtHoIppB/ -TuDvB0GHr2qlXov7z1CymlSvw4m6WC31MJixNnI5fkkE/SmnTHnkBVfblLkWU41G -sx2VYVdWf6/wFlthWG82UBEL2KwrlRYaDh8IzTY0ZRBiZtWAXxQgXy0MoHgKaNYs -1+lvK9JKBZP8nm9rZ/+I8U6laUpSNwXqxhaN0sSZ0YIrO7o1dfdRUVjzyAfd5LQD -fwIDAQABo0IwQDAdBgNVHQ4EFgQU2XQ65DA9DfcS3H5aBZ8eNJr34RQwDwYDVR0T -AQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAYYwDQYJKoZIhvcNAQELBQADggIBADaN -l8xCFWQpN5smLNb7rhVpLGsaGvdftvkHTFnq88nIua7Mui563MD1sC3AO6+fcAUR -ap8lTwEpcOPlDOHqWnzcSbvBHiqB9RZLcpHIojG5qtr8nR/zXUACE/xOHAbKsxSQ -VBcZEhrxH9cMaVr2cXj0lH2RC47skFSOvG+hTKv8dGT9cZr4QQehzZHkPJrgmzI5 -c6sq1WnIeJEmMX3ixzDx/BR4dxIOE/TdFpS/S2d7cFOFyrC78zhNLJA5wA3CXWvp -4uXViI3WLL+rG761KIcSF3Ru/H38j9CHJrAb+7lsq+KePRXBOy5nAliRn+/4Qh8s -t2j1da3Ptfb/EX3C8CSlrdP6oDyp+l3cpaDvRKS+1ujl5BOWF3sGPjLtx7dCvHaj -2GU4Kzg1USEODm8uNBNA4StnDG1KQTAYI1oyVZnJF+A83vbsea0rWBmirSwiGpWO -vpaQXUJXxPkUAzUrHC1RVwinOt4/5Mi0A3PCwSaAuwtCH60NryZy2sy+s6ODWA2C -xR9GUeOcGMyNm43sSet1UNWMKFnKdDTajAshqx7qG+XH/RU+wBeq+yNuJkbL+vmx -cmtpzyKEC2IPrNkZAJSidjzULZrtBJ4tBmIQN1IchXIbJ+XMxjHsN+xjWZsLHXbM -fjKaiJUINlK73nZfdklJrX+9ZSCyycErdhh2n1ax ------END CERTIFICATE----- - -# Issuer: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Subject: CN=Certigna Root CA O=Dhimyotis OU=0002 48146308100036 -# Label: "Certigna Root CA" -# Serial: 269714418870597844693661054334862075617 -# MD5 Fingerprint: 0e:5c:30:62:27:eb:5b:bc:d7:ae:62:ba:e9:d5:df:77 -# SHA1 Fingerprint: 2d:0d:52:14:ff:9e:ad:99:24:01:74:20:47:6e:6c:85:27:27:f5:43 -# SHA256 Fingerprint: d4:8d:3d:23:ee:db:50:a4:59:e5:51:97:60:1c:27:77:4b:9d:7b:18:c9:4d:5a:05:95:11:a1:02:50:b9:31:68 ------BEGIN CERTIFICATE----- -MIIGWzCCBEOgAwIBAgIRAMrpG4nxVQMNo+ZBbcTjpuEwDQYJKoZIhvcNAQELBQAw -WjELMAkGA1UEBhMCRlIxEjAQBgNVBAoMCURoaW15b3RpczEcMBoGA1UECwwTMDAw -MiA0ODE0NjMwODEwMDAzNjEZMBcGA1UEAwwQQ2VydGlnbmEgUm9vdCBDQTAeFw0x -MzEwMDEwODMyMjdaFw0zMzEwMDEwODMyMjdaMFoxCzAJBgNVBAYTAkZSMRIwEAYD -VQQKDAlEaGlteW90aXMxHDAaBgNVBAsMEzAwMDIgNDgxNDYzMDgxMDAwMzYxGTAX -BgNVBAMMEENlcnRpZ25hIFJvb3QgQ0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAw -ggIKAoICAQDNGDllGlmx6mQWDoyUJJV8g9PFOSbcDO8WV43X2KyjQn+Cyu3NW9sO 
-ty3tRQgXstmzy9YXUnIo245Onoq2C/mehJpNdt4iKVzSs9IGPjA5qXSjklYcoW9M -CiBtnyN6tMbaLOQdLNyzKNAT8kxOAkmhVECe5uUFoC2EyP+YbNDrihqECB63aCPu -I9Vwzm1RaRDuoXrC0SIxwoKF0vJVdlB8JXrJhFwLrN1CTivngqIkicuQstDuI7pm -TLtipPlTWmR7fJj6o0ieD5Wupxj0auwuA0Wv8HT4Ks16XdG+RCYyKfHx9WzMfgIh -C59vpD++nVPiz32pLHxYGpfhPTc3GGYo0kDFUYqMwy3OU4gkWGQwFsWq4NYKpkDf -ePb1BHxpE4S80dGnBs8B92jAqFe7OmGtBIyT46388NtEbVncSVmurJqZNjBBe3Yz -IoejwpKGbvlw7q6Hh5UbxHq9MfPU0uWZ/75I7HX1eBYdpnDBfzwboZL7z8g81sWT -Co/1VTp2lc5ZmIoJlXcymoO6LAQ6l73UL77XbJuiyn1tJslV1c/DeVIICZkHJC1k -JWumIWmbat10TWuXekG9qxf5kBdIjzb5LdXF2+6qhUVB+s06RbFo5jZMm5BX7CO5 -hwjCxAnxl4YqKE3idMDaxIzb3+KhF1nOJFl0Mdp//TBt2dzhauH8XwIDAQABo4IB -GjCCARYwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE -FBiHVuBud+4kNTxOc5of1uHieX4rMB8GA1UdIwQYMBaAFBiHVuBud+4kNTxOc5of -1uHieX4rMEQGA1UdIAQ9MDswOQYEVR0gADAxMC8GCCsGAQUFBwIBFiNodHRwczov -L3d3d3cuY2VydGlnbmEuZnIvYXV0b3JpdGVzLzBtBgNVHR8EZjBkMC+gLaArhilo -dHRwOi8vY3JsLmNlcnRpZ25hLmZyL2NlcnRpZ25hcm9vdGNhLmNybDAxoC+gLYYr -aHR0cDovL2NybC5kaGlteW90aXMuY29tL2NlcnRpZ25hcm9vdGNhLmNybDANBgkq -hkiG9w0BAQsFAAOCAgEAlLieT/DjlQgi581oQfccVdV8AOItOoldaDgvUSILSo3L -6btdPrtcPbEo/uRTVRPPoZAbAh1fZkYJMyjhDSSXcNMQH+pkV5a7XdrnxIxPTGRG -HVyH41neQtGbqH6mid2PHMkwgu07nM3A6RngatgCdTer9zQoKJHyBApPNeNgJgH6 -0BGM+RFq7q89w1DTj18zeTyGqHNFkIwgtnJzFyO+B2XleJINugHA64wcZr+shncB -lA2c5uk5jR+mUYyZDDl34bSb+hxnV29qao6pK0xXeXpXIs/NX2NGjVxZOob4Mkdi -o2cNGJHc+6Zr9UhhcyNZjgKnvETq9Emd8VRY+WCv2hikLyhF3HqgiIZd8zvn/yk1 -gPxkQ5Tm4xxvvq0OKmOZK8l+hfZx6AYDlf7ej0gcWtSS6Cvu5zHbugRqh5jnxV/v -faci9wHYTfmJ0A6aBVmknpjZbyvKcL5kwlWj9Omvw5Ip3IgWJJk8jSaYtlu3zM63 -Nwf9JtmYhST/WSMDmu2dnajkXjjO11INb9I/bbEFa0nOipFGc/T2L/Coc3cOZayh -jWZSaX5LaAzHHjcng6WMxwLkFM1JAbBzs/3GkDpv0mztO+7skb6iQ12LAEpmJURw -3kAP+HwV96LOPNdeE4yBFxgX0b3xdxA61GU5wSesVywlVP+i2k+KYTlerj1KjL0= ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign Root CA - G1 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign Root CA - G1" -# Serial: 235931866688319308814040 -# MD5 Fingerprint: 9c:42:84:57:dd:cb:0b:a7:2e:95:ad:b6:f3:da:bc:ac -# SHA1 Fingerprint: 8a:c7:ad:8f:73:ac:4e:c1:b5:75:4d:a5:40:f4:fc:cf:7c:b5:8e:8c -# SHA256 Fingerprint: 40:f6:af:03:46:a9:9a:a1:cd:1d:55:5a:4e:9c:ce:62:c7:f9:63:46:03:ee:40:66:15:83:3d:c8:c8:d0:03:67 ------BEGIN CERTIFICATE----- -MIIDlDCCAnygAwIBAgIKMfXkYgxsWO3W2DANBgkqhkiG9w0BAQsFADBnMQswCQYD -VQQGEwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBU -ZWNobm9sb2dpZXMgTGltaXRlZDEcMBoGA1UEAxMTZW1TaWduIFJvb3QgQ0EgLSBH -MTAeFw0xODAyMTgxODMwMDBaFw00MzAyMTgxODMwMDBaMGcxCzAJBgNVBAYTAklO -MRMwEQYDVQQLEwplbVNpZ24gUEtJMSUwIwYDVQQKExxlTXVkaHJhIFRlY2hub2xv -Z2llcyBMaW1pdGVkMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEcxMIIBIjAN -BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAk0u76WaK7p1b1TST0Bsew+eeuGQz -f2N4aLTNLnF115sgxk0pvLZoYIr3IZpWNVrzdr3YzZr/k1ZLpVkGoZM0Kd0WNHVO -8oG0x5ZOrRkVUkr+PHB1cM2vK6sVmjM8qrOLqs1D/fXqcP/tzxE7lM5OMhbTI0Aq -d7OvPAEsbO2ZLIvZTmmYsvePQbAyeGHWDV/D+qJAkh1cF+ZwPjXnorfCYuKrpDhM -tTk1b+oDafo6VGiFbdbyL0NVHpENDtjVaqSW0RM8LHhQ6DqS0hdW5TUaQBw+jSzt -Od9C4INBdN+jzcKGYEho42kLVACL5HZpIQ15TjQIXhTCzLG3rdd8cIrHhQIDAQAB -o0IwQDAdBgNVHQ4EFgQU++8Nhp6w492pufEhF38+/PB3KxowDgYDVR0PAQH/BAQD -AgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggEBAFn/8oz1h31x -PaOfG1vR2vjTnGs2vZupYeveFix0PZ7mddrXuqe8QhfnPZHr5X3dPpzxz5KsbEjM -wiI/aTvFthUvozXGaCocV685743QNcMYDHsAVhzNixl03r4PEuDQqqE/AjSxcM6d -GNYIAwlG7mDgfrbESQRRfXBgvKqy/3lyeqYdPV8q+Mri/Tm3R7nrft8EI6/6nAYH -6ftjk4BAtcZsCjEozgyfz7MjNYBBjWzEN3uBL4ChQEKF6dk4jeihU80Bv2noWgby 
-RQuQ+q7hv53yrlc8pa6yVvSLZUDp/TGBLPQ5Cdjua6e0ph0VpZj3AYHYhX3zUVxx -iN66zB+Afko= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Subject: CN=emSign ECC Root CA - G3 O=eMudhra Technologies Limited OU=emSign PKI -# Label: "emSign ECC Root CA - G3" -# Serial: 287880440101571086945156 -# MD5 Fingerprint: ce:0b:72:d1:9f:88:8e:d0:50:03:e8:e3:b8:8b:67:40 -# SHA1 Fingerprint: 30:43:fa:4f:f2:57:dc:a0:c3:80:ee:2e:58:ea:78:b2:3f:e6:bb:c1 -# SHA256 Fingerprint: 86:a1:ec:ba:08:9c:4a:8d:3b:be:27:34:c6:12:ba:34:1d:81:3e:04:3c:f9:e8:a8:62:cd:5c:57:a3:6b:be:6b ------BEGIN CERTIFICATE----- -MIICTjCCAdOgAwIBAgIKPPYHqWhwDtqLhDAKBggqhkjOPQQDAzBrMQswCQYDVQQG -EwJJTjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNo -bm9sb2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0g -RzMwHhcNMTgwMjE4MTgzMDAwWhcNNDMwMjE4MTgzMDAwWjBrMQswCQYDVQQGEwJJ -TjETMBEGA1UECxMKZW1TaWduIFBLSTElMCMGA1UEChMcZU11ZGhyYSBUZWNobm9s -b2dpZXMgTGltaXRlZDEgMB4GA1UEAxMXZW1TaWduIEVDQyBSb290IENBIC0gRzMw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAQjpQy4LRL1KPOxst3iAhKAnjlfSU2fySU0 -WXTsuwYc58Byr+iuL+FBVIcUqEqy6HyC5ltqtdyzdc6LBtCGI79G1Y4PPwT01xyS -fvalY8L1X44uT6EYGQIrMgqCZH0Wk9GjQjBAMB0GA1UdDgQWBBR8XQKEE9TMipuB -zhccLikenEhjQjAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggq -hkjOPQQDAwNpADBmAjEAvvNhzwIQHWSVB7gYboiFBS+DCBeQyh+KTOgNG3qxrdWB -CUfvO6wIBHxcmbHtRwfSAjEAnbpV/KlK6O3t5nYBQnvI+GDZjVGLVTv7jHvrZQnD -+JbNR6iC8hZVdyR+EhCVBCyj ------END CERTIFICATE----- - -# Issuer: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign Root CA - C1 O=eMudhra Inc OU=emSign PKI -# Label: "emSign Root CA - C1" -# Serial: 825510296613316004955058 -# MD5 Fingerprint: d8:e3:5d:01:21:fa:78:5a:b0:df:ba:d2:ee:2a:5f:68 -# SHA1 Fingerprint: e7:2e:f1:df:fc:b2:09:28:cf:5d:d4:d5:67:37:b1:51:cb:86:4f:01 -# SHA256 Fingerprint: 12:56:09:aa:30:1d:a0:a2:49:b9:7a:82:39:cb:6a:34:21:6f:44:dc:ac:9f:39:54:b1:42:92:f2:e8:c8:60:8f ------BEGIN CERTIFICATE----- -MIIDczCCAlugAwIBAgILAK7PALrEzzL4Q7IwDQYJKoZIhvcNAQELBQAwVjELMAkG -A1UEBhMCVVMxEzARBgNVBAsTCmVtU2lnbiBQS0kxFDASBgNVBAoTC2VNdWRocmEg -SW5jMRwwGgYDVQQDExNlbVNpZ24gUm9vdCBDQSAtIEMxMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowVjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMRwwGgYDVQQDExNlbVNpZ24gUm9v -dCBDQSAtIEMxMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz+upufGZ -BczYKCFK83M0UYRWEPWgTywS4/oTmifQz/l5GnRfHXk5/Fv4cI7gklL35CX5VIPZ -HdPIWoU/Xse2B+4+wM6ar6xWQio5JXDWv7V7Nq2s9nPczdcdioOl+yuQFTdrHCZH -3DspVpNqs8FqOp099cGXOFgFixwR4+S0uF2FHYP+eF8LRWgYSKVGczQ7/g/IdrvH -GPMF0Ybzhe3nudkyrVWIzqa2kbBPrH4VI5b2P/AgNBbeCsbEBEV5f6f9vtKppa+c -xSMq9zwhbL2vj07FOrLzNBL834AaSaTUqZX3noleoomslMuoaJuvimUnzYnu3Yy1 -aylwQ6BpC+S5DwIDAQABo0IwQDAdBgNVHQ4EFgQU/qHgcB4qAzlSWkK+XJGFehiq -TbUwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEL -BQADggEBAMJKVvoVIXsoounlHfv4LcQ5lkFMOycsxGwYFYDGrK9HWS8mC+M2sO87 -/kOXSTKZEhVb3xEp/6tT+LvBeA+snFOvV71ojD1pM/CjoCNjO2RnIkSt1XHLVip4 -kqNPEjE2NuLe/gDEo2APJ62gsIq1NnpSob0n9CAnYuhNlCQT5AoE6TyrLshDCUrG -YQTlSTR+08TI9Q/Aqum6VF7zYytPT1DU/rl7mYw9wC68AivTxEDkigcxHpvOJpkT -+xHqmiIMERnHXhuBUDDIlhJu58tBf5E7oke3VIAb3ADMmpDqw8NQBmIMMMAVSKeo -WXzhriKi4gp6D/piq1JM4fHfyr6DDUI= ------END CERTIFICATE----- - -# Issuer: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Subject: CN=emSign ECC Root CA - C3 O=eMudhra Inc OU=emSign PKI -# Label: "emSign ECC Root CA - C3" -# Serial: 582948710642506000014504 -# MD5 Fingerprint: 3e:53:b3:a3:81:ee:d7:10:f8:d3:b0:1d:17:92:f5:d5 -# SHA1 Fingerprint: 
b6:af:43:c2:9b:81:53:7d:f6:ef:6b:c3:1f:1f:60:15:0c:ee:48:66 -# SHA256 Fingerprint: bc:4d:80:9b:15:18:9d:78:db:3e:1d:8c:f4:f9:72:6a:79:5d:a1:64:3c:a5:f1:35:8e:1d:db:0e:dc:0d:7e:b3 ------BEGIN CERTIFICATE----- -MIICKzCCAbGgAwIBAgIKe3G2gla4EnycqDAKBggqhkjOPQQDAzBaMQswCQYDVQQG -EwJVUzETMBEGA1UECxMKZW1TaWduIFBLSTEUMBIGA1UEChMLZU11ZGhyYSBJbmMx -IDAeBgNVBAMTF2VtU2lnbiBFQ0MgUm9vdCBDQSAtIEMzMB4XDTE4MDIxODE4MzAw -MFoXDTQzMDIxODE4MzAwMFowWjELMAkGA1UEBhMCVVMxEzARBgNVBAsTCmVtU2ln -biBQS0kxFDASBgNVBAoTC2VNdWRocmEgSW5jMSAwHgYDVQQDExdlbVNpZ24gRUND -IFJvb3QgQ0EgLSBDMzB2MBAGByqGSM49AgEGBSuBBAAiA2IABP2lYa57JhAd6bci -MK4G9IGzsUJxlTm801Ljr6/58pc1kjZGDoeVjbk5Wum739D+yAdBPLtVb4Ojavti -sIGJAnB9SMVK4+kiVCJNk7tCDK93nCOmfddhEc5lx/h//vXyqaNCMEAwHQYDVR0O -BBYEFPtaSNCAIEDyqOkAB2kZd6fmw/TPMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMB -Af8EBTADAQH/MAoGCCqGSM49BAMDA2gAMGUCMQC02C8Cif22TGK6Q04ThHK1rt0c -3ta13FaPWEBaLd4gTCKDypOofu4SQMfWh0/434UCMBwUZOR8loMRnLDRWmFLpg9J -0wD8ofzkpf9/rdcw0Md3f76BB1UwUCAU9Vc4CqgxUQ== ------END CERTIFICATE----- - -# Issuer: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Subject: CN=Hongkong Post Root CA 3 O=Hongkong Post -# Label: "Hongkong Post Root CA 3" -# Serial: 46170865288971385588281144162979347873371282084 -# MD5 Fingerprint: 11:fc:9f:bd:73:30:02:8a:fd:3f:f3:58:b9:cb:20:f0 -# SHA1 Fingerprint: 58:a2:d0:ec:20:52:81:5b:c1:f3:f8:64:02:24:4e:c2:8e:02:4b:02 -# SHA256 Fingerprint: 5a:2f:c0:3f:0c:83:b0:90:bb:fa:40:60:4b:09:88:44:6c:76:36:18:3d:f9:84:6e:17:10:1a:44:7f:b8:ef:d6 ------BEGIN CERTIFICATE----- -MIIFzzCCA7egAwIBAgIUCBZfikyl7ADJk0DfxMauI7gcWqQwDQYJKoZIhvcNAQEL -BQAwbzELMAkGA1UEBhMCSEsxEjAQBgNVBAgTCUhvbmcgS29uZzESMBAGA1UEBxMJ -SG9uZyBLb25nMRYwFAYDVQQKEw1Ib25na29uZyBQb3N0MSAwHgYDVQQDExdIb25n -a29uZyBQb3N0IFJvb3QgQ0EgMzAeFw0xNzA2MDMwMjI5NDZaFw00MjA2MDMwMjI5 -NDZaMG8xCzAJBgNVBAYTAkhLMRIwEAYDVQQIEwlIb25nIEtvbmcxEjAQBgNVBAcT -CUhvbmcgS29uZzEWMBQGA1UEChMNSG9uZ2tvbmcgUG9zdDEgMB4GA1UEAxMXSG9u -Z2tvbmcgUG9zdCBSb290IENBIDMwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK -AoICAQCziNfqzg8gTr7m1gNt7ln8wlffKWihgw4+aMdoWJwcYEuJQwy51BWy7sFO -dem1p+/l6TWZ5Mwc50tfjTMwIDNT2aa71T4Tjukfh0mtUC1Qyhi+AViiE3CWu4mI -VoBc+L0sPOFMV4i707mV78vH9toxdCim5lSJ9UExyuUmGs2C4HDaOym71QP1mbpV -9WTRYA6ziUm4ii8F0oRFKHyPaFASePwLtVPLwpgchKOesL4jpNrcyCse2m5FHomY -2vkALgbpDDtw1VAliJnLzXNg99X/NWfFobxeq81KuEXryGgeDQ0URhLj0mRiikKY -vLTGCAj4/ahMZJx2Ab0vqWwzD9g/KLg8aQFChn5pwckGyuV6RmXpwtZQQS4/t+Tt -bNe/JgERohYpSms0BpDsE9K2+2p20jzt8NYt3eEV7KObLyzJPivkaTv/ciWxNoZb -x39ri1UbSsUgYT2uy1DhCDq+sI9jQVMwCFk8mB13umOResoQUGC/8Ne8lYePl8X+ -l2oBlKN8W4UdKjk60FSh0Tlxnf0h+bV78OLgAo9uliQlLKAeLKjEiafv7ZkGL7YK -TE/bosw3Gq9HhS2KX8Q0NEwA/RiTZxPRN+ZItIsGxVd7GYYKecsAyVKvQv83j+Gj -Hno9UKtjBucVtT+2RTeUN7F+8kjDf8V1/peNRY8apxpyKBpADwIDAQABo2MwYTAP -BgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAfBgNVHSMEGDAWgBQXnc0e -i9Y5K3DTXNSguB+wAPzFYTAdBgNVHQ4EFgQUF53NHovWOStw01zUoLgfsAD8xWEw -DQYJKoZIhvcNAQELBQADggIBAFbVe27mIgHSQpsY1Q7XZiNc4/6gx5LS6ZStS6LG -7BJ8dNVI0lkUmcDrudHr9EgwW62nV3OZqdPlt9EuWSRY3GguLmLYauRwCy0gUCCk -MpXRAJi70/33MvJJrsZ64Ee+bs7Lo3I6LWldy8joRTnU+kLBEUx3XZL7av9YROXr -gZ6voJmtvqkBZss4HTzfQx/0TW60uhdG/H39h4F5ag0zD/ov+BS5gLNdTaqX4fnk -GMX41TiMJjz98iji7lpJiCzfeT2OnpA8vUFKOt1b9pq0zj8lMH8yfaIDlNDceqFS -3m6TjRgm/VWsvY+b0s+v54Ysyx8Jb6NvqYTUc79NoXQbTiNg8swOqn+knEwlqLJm -Ozj/2ZQw9nKEvmhVEA/GcywWaZMH/rFF7buiVWqw2rVKAiUnhde3t4ZEFolsgCs+ -l6mc1X5VTMbeRRAc6uk7nwNT7u56AQIWeNTowr5GdogTPyK7SBIdUgC0An4hGh6c -JfTzPV4e0hz5sy229zdcxsshTrD3mUcYhcErulWuBurQB7Lcq9CClnXO0lD+mefP -L5/ndtFhKvshuzHQqp9HpLIiyhY6UFfEW0NnxWViA0kB60PZ2Pierc+xYw5F9KBa 
-LJstxabArahH9CdMOA0uG0k7UvToiIMrVCjU8jVStDKDYmlkDJGcn5fqdBb9HxEG -mpv0 ------END CERTIFICATE----- - -# Issuer: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Subject: CN=Entrust Root Certification Authority - G4 O=Entrust, Inc. OU=See www.entrust.net/legal-terms/(c) 2015 Entrust, Inc. - for authorized use only -# Label: "Entrust Root Certification Authority - G4" -# Serial: 289383649854506086828220374796556676440 -# MD5 Fingerprint: 89:53:f1:83:23:b7:7c:8e:05:f1:8c:71:38:4e:1f:88 -# SHA1 Fingerprint: 14:88:4e:86:26:37:b0:26:af:59:62:5c:40:77:ec:35:29:ba:96:01 -# SHA256 Fingerprint: db:35:17:d1:f6:73:2a:2d:5a:b9:7c:53:3e:c7:07:79:ee:32:70:a6:2f:b4:ac:42:38:37:24:60:e6:f0:1e:88 ------BEGIN CERTIFICATE----- -MIIGSzCCBDOgAwIBAgIRANm1Q3+vqTkPAAAAAFVlrVgwDQYJKoZIhvcNAQELBQAw -gb4xCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQL -Ex9TZWUgd3d3LmVudHJ1c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykg -MjAxNSBFbnRydXN0LCBJbmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAw -BgNVBAMTKUVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0 -MB4XDTE1MDUyNzExMTExNloXDTM3MTIyNzExNDExNlowgb4xCzAJBgNVBAYTAlVT -MRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMSgwJgYDVQQLEx9TZWUgd3d3LmVudHJ1 -c3QubmV0L2xlZ2FsLXRlcm1zMTkwNwYDVQQLEzAoYykgMjAxNSBFbnRydXN0LCBJ -bmMuIC0gZm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxMjAwBgNVBAMTKUVudHJ1c3Qg -Um9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEc0MIICIjANBgkqhkiG9w0B -AQEFAAOCAg8AMIICCgKCAgEAsewsQu7i0TD/pZJH4i3DumSXbcr3DbVZwbPLqGgZ -2K+EbTBwXX7zLtJTmeH+H17ZSK9dE43b/2MzTdMAArzE+NEGCJR5WIoV3imz/f3E -T+iq4qA7ec2/a0My3dl0ELn39GjUu9CH1apLiipvKgS1sqbHoHrmSKvS0VnM1n4j -5pds8ELl3FFLFUHtSUrJ3hCX1nbB76W1NhSXNdh4IjVS70O92yfbYVaCNNzLiGAM -C1rlLAHGVK/XqsEQe9IFWrhAnoanw5CGAlZSCXqc0ieCU0plUmr1POeo8pyvi73T -DtTUXm6Hnmo9RR3RXRv06QqsYJn7ibT/mCzPfB3pAqoEmh643IhuJbNsZvc8kPNX -wbMv9W3y+8qh+CmdRouzavbmZwe+LGcKKh9asj5XxNMhIWNlUpEbsZmOeX7m640A -2Vqq6nPopIICR5b+W45UYaPrL0swsIsjdXJ8ITzI9vF01Bx7owVV7rtNOzK+mndm -nqxpkCIHH2E6lr7lmk/MBTwoWdPBDFSoWWG9yHJM6Nyfh3+9nEg2XpWjDrk4JFX8 -dWbrAuMINClKxuMrLzOg2qOGpRKX/YAr2hRC45K9PvJdXmd0LhyIRyk0X+IyqJwl -N4y6mACXi0mWHv0liqzc2thddG5msP9E36EYxr5ILzeUePiVSj9/E15dWf10hkNj -c0kCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFJ84xFYjwznooHFs6FRM5Og6sb9nMA0GCSqGSIb3DQEBCwUAA4ICAQAS -5UKme4sPDORGpbZgQIeMJX6tuGguW8ZAdjwD+MlZ9POrYs4QjbRaZIxowLByQzTS -Gwv2LFPSypBLhmb8qoMi9IsabyZIrHZ3CL/FmFz0Jomee8O5ZDIBf9PD3Vht7LGr -hFV0d4QEJ1JrhkzO3bll/9bGXp+aEJlLdWr+aumXIOTkdnrG0CSqkM0gkLpHZPt/ -B7NTeLUKYvJzQ85BK4FqLoUWlFPUa19yIqtRLULVAJyZv967lDtX/Zr1hstWO1uI -AeV8KEsD+UmDfLJ/fOPtjqF/YFOOVZ1QNBIPt5d7bIdKROf1beyAN/BYGW5KaHbw -H5Lk6rWS02FREAutp9lfx1/cH6NcjKF+m7ee01ZvZl4HliDtC3T7Zk6LERXpgUl+ -b7DUUH8i119lAg2m9IUe2K4GS0qn0jFmwvjO5QimpAKWRGhXxNUzzxkvFMSUHHuk -2fCfDrGA4tGeEWSpiBE6doLlYsKA2KSD7ZPvfC+QsDJMlhVoSFLUmQjAJOgc47Ol -IQ6SwJAfzyBfyjs4x7dtOvPmRLgOMWuIjnDrnBdSqEGULoe256YSxXXfW8AKbnuk -5F6G+TaU33fD6Q3AOfF5u0aOq0NZJ7cguyPpVkAh7DE9ZapD8j3fcEThuk0mEDuY -n/PIjhs4ViFqUZPTkcpG2om3PVODLAgfi49T3f+sHw== ------END CERTIFICATE----- - -# Issuer: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft ECC Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft ECC Root Certificate Authority 2017" -# Serial: 136839042543790627607696632466672567020 -# MD5 Fingerprint: dd:a1:03:e6:4a:93:10:d1:bf:f0:19:42:cb:fe:ed:67 -# SHA1 Fingerprint: 99:9a:64:c3:7f:f4:7d:9f:ab:95:f1:47:69:89:14:60:ee:c4:c3:c5 -# SHA256 Fingerprint: 
35:8d:f3:9d:76:4a:f9:e1:b7:66:e9:c9:72:df:35:2e:e1:5c:fa:c2:27:af:6a:d1:d7:0e:8e:4a:6e:dc:ba:02 ------BEGIN CERTIFICATE----- -MIICWTCCAd+gAwIBAgIQZvI9r4fei7FK6gxXMQHC7DAKBggqhkjOPQQDAzBlMQsw -CQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYD -VQQDEy1NaWNyb3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIw -MTcwHhcNMTkxMjE4MjMwNjQ1WhcNNDIwNzE4MjMxNjA0WjBlMQswCQYDVQQGEwJV -UzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1NaWNy -b3NvZnQgRUNDIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwdjAQBgcq -hkjOPQIBBgUrgQQAIgNiAATUvD0CQnVBEyPNgASGAlEvaqiBYgtlzPbKnR5vSmZR -ogPZnZH6thaxjG7efM3beaYvzrvOcS/lpaso7GMEZpn4+vKTEAXhgShC48Zo9OYb -hGBKia/teQ87zvH2RPUBeMCjVDBSMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8E -BTADAQH/MB0GA1UdDgQWBBTIy5lycFIM+Oa+sgRXKSrPQhDtNTAQBgkrBgEEAYI3 -FQEEAwIBADAKBggqhkjOPQQDAwNoADBlAjBY8k3qDPlfXu5gKcs68tvWMoQZP3zV -L8KxzJOuULsJMsbG7X7JNpQS5GiFBqIb0C8CMQCZ6Ra0DvpWSNSkMBaReNtUjGUB -iudQZsIxtzm6uBoiB078a1QWIP8rtedMDE2mT3M= ------END CERTIFICATE----- - -# Issuer: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Subject: CN=Microsoft RSA Root Certificate Authority 2017 O=Microsoft Corporation -# Label: "Microsoft RSA Root Certificate Authority 2017" -# Serial: 40975477897264996090493496164228220339 -# MD5 Fingerprint: 10:ff:00:ff:cf:c9:f8:c7:7a:c0:ee:35:8e:c9:0f:47 -# SHA1 Fingerprint: 73:a5:e6:4a:3b:ff:83:16:ff:0e:dc:cc:61:8a:90:6e:4e:ae:4d:74 -# SHA256 Fingerprint: c7:41:f7:0f:4b:2a:8d:88:bf:2e:71:c1:41:22:ef:53:ef:10:eb:a0:cf:a5:e6:4c:fa:20:f4:18:85:30:73:e0 ------BEGIN CERTIFICATE----- -MIIFqDCCA5CgAwIBAgIQHtOXCV/YtLNHcB6qvn9FszANBgkqhkiG9w0BAQwFADBl -MQswCQYDVQQGEwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYw -NAYDVQQDEy1NaWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5 -IDIwMTcwHhcNMTkxMjE4MjI1MTIyWhcNNDIwNzE4MjMwMDIzWjBlMQswCQYDVQQG -EwJVUzEeMBwGA1UEChMVTWljcm9zb2Z0IENvcnBvcmF0aW9uMTYwNAYDVQQDEy1N -aWNyb3NvZnQgUlNBIFJvb3QgQ2VydGlmaWNhdGUgQXV0aG9yaXR5IDIwMTcwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKW76UM4wplZEWCpW9R2LBifOZ -Nt9GkMml7Xhqb0eRaPgnZ1AzHaGm++DlQ6OEAlcBXZxIQIJTELy/xztokLaCLeX0 -ZdDMbRnMlfl7rEqUrQ7eS0MdhweSE5CAg2Q1OQT85elss7YfUJQ4ZVBcF0a5toW1 -HLUX6NZFndiyJrDKxHBKrmCk3bPZ7Pw71VdyvD/IybLeS2v4I2wDwAW9lcfNcztm -gGTjGqwu+UcF8ga2m3P1eDNbx6H7JyqhtJqRjJHTOoI+dkC0zVJhUXAoP8XFWvLJ -jEm7FFtNyP9nTUwSlq31/niol4fX/V4ggNyhSyL71Imtus5Hl0dVe49FyGcohJUc -aDDv70ngNXtk55iwlNpNhTs+VcQor1fznhPbRiefHqJeRIOkpcrVE7NLP8TjwuaG -YaRSMLl6IE9vDzhTyzMMEyuP1pq9KsgtsRx9S1HKR9FIJ3Jdh+vVReZIZZ2vUpC6 -W6IYZVcSn2i51BVrlMRpIpj0M+Dt+VGOQVDJNE92kKz8OMHY4Xu54+OU4UZpyw4K -UGsTuqwPN1q3ErWQgR5WrlcihtnJ0tHXUeOrO8ZV/R4O03QK0dqq6mm4lyiPSMQH -+FJDOvTKVTUssKZqwJz58oHhEmrARdlns87/I6KJClTUFLkqqNfs+avNJVgyeY+Q -W5g5xAgGwax/Dj0ApQIDAQABo1QwUjAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/ -BAUwAwEB/zAdBgNVHQ4EFgQUCctZf4aycI8awznjwNnpv7tNsiMwEAYJKwYBBAGC -NxUBBAMCAQAwDQYJKoZIhvcNAQEMBQADggIBAKyvPl3CEZaJjqPnktaXFbgToqZC -LgLNFgVZJ8og6Lq46BrsTaiXVq5lQ7GPAJtSzVXNUzltYkyLDVt8LkS/gxCP81OC -gMNPOsduET/m4xaRhPtthH80dK2Jp86519efhGSSvpWhrQlTM93uCupKUY5vVau6 -tZRGrox/2KJQJWVggEbbMwSubLWYdFQl3JPk+ONVFT24bcMKpBLBaYVu32TxU5nh -SnUgnZUP5NbcA/FZGOhHibJXWpS2qdgXKxdJ5XbLwVaZOjex/2kskZGT4d9Mozd2 -TaGf+G0eHdP67Pv0RR0Tbc/3WeUiJ3IrhvNXuzDtJE3cfVa7o7P4NHmJweDyAmH3 -pvwPuxwXC65B2Xy9J6P9LjrRk5Sxcx0ki69bIImtt2dmefU6xqaWM/5TkshGsRGR -xpl/j8nWZjEgQRCHLQzWwa80mMpkg/sTV9HB8Dx6jKXB/ZUhoHHBk2dxEuqPiApp -GWSZI1b7rCoucL5mxAyE7+WL85MB+GqQk2dLsmijtWKP6T+MejteD+eMuMZ87zf9 -dOLITzNy4ZQ5bb0Sr74MTnB8G2+NszKTc0QWbej09+CVgI+WXTik9KveCjCHk9hN -AHFiRSdLOkKEW39lt2c0Ui2cFmuqqNh7o0JMcccMyj6D5KbvtwEwXlGjefVwaaZB 
-RA+GsCyRxj3qrg+E ------END CERTIFICATE----- - -# Issuer: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Subject: CN=e-Szigno Root CA 2017 O=Microsec Ltd. -# Label: "e-Szigno Root CA 2017" -# Serial: 411379200276854331539784714 -# MD5 Fingerprint: de:1f:f6:9e:84:ae:a7:b4:21:ce:1e:58:7d:d1:84:98 -# SHA1 Fingerprint: 89:d4:83:03:4f:9e:9a:48:80:5f:72:37:d4:a9:a6:ef:cb:7c:1f:d1 -# SHA256 Fingerprint: be:b0:0b:30:83:9b:9b:c3:2c:32:e4:44:79:05:95:06:41:f2:64:21:b1:5e:d0:89:19:8b:51:8a:e2:ea:1b:99 ------BEGIN CERTIFICATE----- -MIICQDCCAeWgAwIBAgIMAVRI7yH9l1kN9QQKMAoGCCqGSM49BAMCMHExCzAJBgNV -BAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMgTHRk -LjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25vIFJv -b3QgQ0EgMjAxNzAeFw0xNzA4MjIxMjA3MDZaFw00MjA4MjIxMjA3MDZaMHExCzAJ -BgNVBAYTAkhVMREwDwYDVQQHDAhCdWRhcGVzdDEWMBQGA1UECgwNTWljcm9zZWMg -THRkLjEXMBUGA1UEYQwOVkFUSFUtMjM1ODQ0OTcxHjAcBgNVBAMMFWUtU3ppZ25v -IFJvb3QgQ0EgMjAxNzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABJbcPYrYsHtv -xie+RJCxs1YVe45DJH0ahFnuY2iyxl6H0BVIHqiQrb1TotreOpCmYF9oMrWGQd+H -Wyx7xf58etqjYzBhMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0G -A1UdDgQWBBSHERUI0arBeAyxr87GyZDvvzAEwDAfBgNVHSMEGDAWgBSHERUI0arB -eAyxr87GyZDvvzAEwDAKBggqhkjOPQQDAgNJADBGAiEAtVfd14pVCzbhhkT61Nlo -jbjcI4qKDdQvfepz7L9NbKgCIQDLpbQS+ue16M9+k/zzNY9vTlp8tLxOsvxyqltZ -+efcMQ== ------END CERTIFICATE----- - -# Issuer: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Subject: O=CERTSIGN SA OU=certSIGN ROOT CA G2 -# Label: "certSIGN Root CA G2" -# Serial: 313609486401300475190 -# MD5 Fingerprint: 8c:f1:75:8a:c6:19:cf:94:b7:f7:65:20:87:c3:97:c7 -# SHA1 Fingerprint: 26:f9:93:b4:ed:3d:28:27:b0:b9:4b:a7:e9:15:1d:a3:8d:92:e5:32 -# SHA256 Fingerprint: 65:7c:fe:2f:a7:3f:aa:38:46:25:71:f3:32:a2:36:3a:46:fc:e7:02:09:51:71:07:02:cd:fb:b6:ee:da:33:05 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIJEQA0tk7GNi02MA0GCSqGSIb3DQEBCwUAMEExCzAJBgNV -BAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJR04g -Uk9PVCBDQSBHMjAeFw0xNzAyMDYwOTI3MzVaFw00MjAyMDYwOTI3MzVaMEExCzAJ -BgNVBAYTAlJPMRQwEgYDVQQKEwtDRVJUU0lHTiBTQTEcMBoGA1UECxMTY2VydFNJ -R04gUk9PVCBDQSBHMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAMDF -dRmRfUR0dIf+DjuW3NgBFszuY5HnC2/OOwppGnzC46+CjobXXo9X69MhWf05N0Iw -vlDqtg+piNguLWkh59E3GE59kdUWX2tbAMI5Qw02hVK5U2UPHULlj88F0+7cDBrZ -uIt4ImfkabBoxTzkbFpG583H+u/E7Eu9aqSs/cwoUe+StCmrqzWaTOTECMYmzPhp -n+Sc8CnTXPnGFiWeI8MgwT0PPzhAsP6CRDiqWhqKa2NYOLQV07YRaXseVO6MGiKs -cpc/I1mbySKEwQdPzH/iV8oScLumZfNpdWO9lfsbl83kqK/20U6o2YpxJM02PbyW -xPFsqa7lzw1uKA2wDrXKUXt4FMMgL3/7FFXhEZn91QqhngLjYl/rNUssuHLoPj1P -rCy7Lobio3aP5ZMqz6WryFyNSwb/EkaseMsUBzXgqd+L6a8VTxaJW732jcZZroiF -DsGJ6x9nxUWO/203Nit4ZoORUSs9/1F3dmKh7Gc+PoGD4FapUB8fepmrY7+EF3fx -DTvf95xhszWYijqy7DwaNz9+j5LP2RIUZNoQAhVB/0/E6xyjyfqZ90bp4RjZsbgy -LcsUDFDYg2WD7rlcz8sFWkz6GZdr1l0T08JcVLwyc6B49fFtHsufpaafItzRUZ6C -eWRgKRM+o/1Pcmqr4tTluCRVLERLiohEnMqE0yo7AgMBAAGjQjBAMA8GA1UdEwEB -/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSCIS1mxteg4BXrzkwJ -d8RgnlRuAzANBgkqhkiG9w0BAQsFAAOCAgEAYN4auOfyYILVAzOBywaK8SJJ6ejq -kX/GM15oGQOGO0MBzwdw5AgeZYWR5hEit/UCI46uuR59H35s5r0l1ZUa8gWmr4UC -b6741jH/JclKyMeKqdmfS0mbEVeZkkMR3rYzpMzXjWR91M08KCy0mpbqTfXERMQl -qiCA2ClV9+BB/AYm/7k29UMUA2Z44RGx2iBfRgB4ACGlHgAoYXhvqAEBj500mv/0 -OJD7uNGzcgbJceaBxXntC6Z58hMLnPddDnskk7RI24Zf3lCGeOdA5jGokHZwYa+c -NywRtYK3qq4kNFtyDGkNzVmf9nGvnAvRCjj5BiKDUyUM/FHE5r7iOZULJK2v0ZXk -ltd0ZGtxTgI8qoXzIKNDOXZbbFD+mpwUHmUUihW9o4JFWklWatKcsWMy5WHgUyIO -pwpJ6st+H6jiYoD2EEVSmAYY3qXNL3+q1Ok+CHLsIwMCPKaq2LxndD0UF/tUSxfj -03k9bWtJySgOLnRQvwzZRjoQhsmnP+mg7H/rpXdYaXHmgwo38oZJar55CJD2AhZk 
-PuXaTH4MNMn5X7azKFGnpyuqSfqNZSlO42sTp5SjLVFteAxEy9/eCG/Oo2Sr05WE -1LlSVHJ7liXMvGnjSG4N0MedJ5qq+BOS3R7fY581qRY27Iy4g/Q9iY/NtBde17MX -QRBdJ3NghVdJIgc= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global Certification Authority" -# Serial: 1846098327275375458322922162 -# MD5 Fingerprint: f8:1c:18:2d:2f:ba:5f:6d:a1:6c:bc:c7:ab:91:c7:0e -# SHA1 Fingerprint: 2f:8f:36:4f:e1:58:97:44:21:59:87:a5:2a:9a:d0:69:95:26:7f:b5 -# SHA256 Fingerprint: 97:55:20:15:f5:dd:fc:3c:87:88:c0:06:94:45:55:40:88:94:45:00:84:f1:00:86:70:86:bc:1a:2b:b5:8d:c8 ------BEGIN CERTIFICATE----- -MIIF2jCCA8KgAwIBAgIMBfcOhtpJ80Y1LrqyMA0GCSqGSIb3DQEBCwUAMIGIMQsw -CQYDVQQGEwJVUzERMA8GA1UECAwISWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28x -ITAfBgNVBAoMGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1 -c3R3YXZlIEdsb2JhbCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0xNzA4MjMx -OTM0MTJaFw00MjA4MjMxOTM0MTJaMIGIMQswCQYDVQQGEwJVUzERMA8GA1UECAwI -SWxsaW5vaXMxEDAOBgNVBAcMB0NoaWNhZ28xITAfBgNVBAoMGFRydXN0d2F2ZSBI -b2xkaW5ncywgSW5jLjExMC8GA1UEAwwoVHJ1c3R3YXZlIEdsb2JhbCBDZXJ0aWZp -Y2F0aW9uIEF1dGhvcml0eTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIB -ALldUShLPDeS0YLOvR29zd24q88KPuFd5dyqCblXAj7mY2Hf8g+CY66j96xz0Xzn -swuvCAAJWX/NKSqIk4cXGIDtiLK0thAfLdZfVaITXdHG6wZWiYj+rDKd/VzDBcdu -7oaJuogDnXIhhpCujwOl3J+IKMujkkkP7NAP4m1ET4BqstTnoApTAbqOl5F2brz8 -1Ws25kCI1nsvXwXoLG0R8+eyvpJETNKXpP7ScoFDB5zpET71ixpZfR9oWN0EACyW -80OzfpgZdNmcc9kYvkHHNHnZ9GLCQ7mzJ7Aiy/k9UscwR7PJPrhq4ufogXBeQotP -JqX+OsIgbrv4Fo7NDKm0G2x2EOFYeUY+VM6AqFcJNykbmROPDMjWLBz7BegIlT1l -RtzuzWniTY+HKE40Cz7PFNm73bZQmq131BnW2hqIyE4bJ3XYsgjxroMwuREOzYfw -hI0Vcnyh78zyiGG69Gm7DIwLdVcEuE4qFC49DxweMqZiNu5m4iK4BUBjECLzMx10 -coos9TkpoNPnG4CELcU9402x/RpvumUHO1jsQkUm+9jaJXLE9gCxInm943xZYkqc -BW89zubWR2OZxiRvchLIrH+QtAuRcOi35hYQcRfO3gZPSEF9NUqjifLJS3tBEW1n -twiYTOURGa5CgNz7kAXU+FDKvuStx8KU1xad5hePrzb7AgMBAAGjQjBAMA8GA1Ud -EwEB/wQFMAMBAf8wHQYDVR0OBBYEFJngGWcNYtt2s9o9uFvo/ULSMQ6HMA4GA1Ud -DwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAmHNw4rDT7TnsTGDZqRKGFx6W -0OhUKDtkLSGm+J1WE2pIPU/HPinbbViDVD2HfSMF1OQc3Og4ZYbFdada2zUFvXfe -uyk3QAUHw5RSn8pk3fEbK9xGChACMf1KaA0HZJDmHvUqoai7PF35owgLEQzxPy0Q -lG/+4jSHg9bP5Rs1bdID4bANqKCqRieCNqcVtgimQlRXtpla4gt5kNdXElE1GYhB -aCXUNxeEFfsBctyV3lImIJgm4nb1J2/6ADtKYdkNy1GTKv0WBpanI5ojSP5RvbbE -sLFUzt5sQa0WZ37b/TjNuThOssFgy50X31ieemKyJo90lZvkWx3SD92YHJtZuSPT -MaCm/zjdzyBP6VhWOmfD0faZmZ26NraAL4hHT4a/RDqA5Dccprrql5gR0IRiR2Qe -qu5AvzSxnI9O4fKSTx+O856X3vOmeWqJcU9LJxdI/uz0UA9PSX3MReO9ekDFQdxh -VicGaeVyQYHTtgGJoC86cnn+OjC/QezHYj6RS8fZMXZC+fc8Y+wmjHMMfRod6qh8 -h6jCJ3zhM0EPz8/8AKAigJ5Kp28AsEFFtyLKaEjFQqKu3R3y4G5OBVixwJAWKqQ9 -EEC+j2Jjg6mcgn0tAumDMHzLJ8n9HmYAsC7TIS+OMxZsmO0QqAfWzJPP29FpHOTK -yeC2nOnOcXHebD8WpHk= ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P256 Certification Authority O=Trustwave Holdings, Inc. 
-# Label: "Trustwave Global ECC P256 Certification Authority" -# Serial: 4151900041497450638097112925 -# MD5 Fingerprint: 5b:44:e3:8d:5d:36:86:26:e8:0d:05:d2:59:a7:83:54 -# SHA1 Fingerprint: b4:90:82:dd:45:0c:be:8b:5b:b1:66:d3:e2:a4:08:26:cd:ed:42:cf -# SHA256 Fingerprint: 94:5b:bc:82:5e:a5:54:f4:89:d1:fd:51:a7:3d:df:2e:a6:24:ac:70:19:a0:52:05:22:5c:22:a7:8c:cf:a8:b4 ------BEGIN CERTIFICATE----- -MIICYDCCAgegAwIBAgIMDWpfCD8oXD5Rld9dMAoGCCqGSM49BAMCMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM1MTBaFw00MjA4MjMxOTM1MTBaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDI1NiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTBZMBMGByqGSM49AgEGCCqG -SM49AwEHA0IABH77bOYj43MyCMpg5lOcunSNGLB4kFKA3TjASh3RqMyTpJcGOMoN -FWLGjgEqZZ2q3zSRLoHB5DOSMcT9CTqmP62jQzBBMA8GA1UdEwEB/wQFMAMBAf8w -DwYDVR0PAQH/BAUDAwcGADAdBgNVHQ4EFgQUo0EGrJBt0UrrdaVKEJmzsaGLSvcw -CgYIKoZIzj0EAwIDRwAwRAIgB+ZU2g6gWrKuEZ+Hxbb/ad4lvvigtwjzRM4q3wgh -DDcCIC0mA6AFvWvR9lz4ZcyGbbOcNEhjhAnFjXca4syc4XR7 ------END CERTIFICATE----- - -# Issuer: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Subject: CN=Trustwave Global ECC P384 Certification Authority O=Trustwave Holdings, Inc. -# Label: "Trustwave Global ECC P384 Certification Authority" -# Serial: 2704997926503831671788816187 -# MD5 Fingerprint: ea:cf:60:c4:3b:b9:15:29:40:a1:97:ed:78:27:93:d6 -# SHA1 Fingerprint: e7:f3:a3:c8:cf:6f:c3:04:2e:6d:0e:67:32:c5:9e:68:95:0d:5e:d2 -# SHA256 Fingerprint: 55:90:38:59:c8:c0:c3:eb:b8:75:9e:ce:4e:25:57:22:5f:f5:75:8b:bd:38:eb:d4:82:76:60:1e:1b:d5:80:97 ------BEGIN CERTIFICATE----- -MIICnTCCAiSgAwIBAgIMCL2Fl2yZJ6SAaEc7MAoGCCqGSM49BAMDMIGRMQswCQYD -VQQGEwJVUzERMA8GA1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAf -BgNVBAoTGFRydXN0d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3 -YXZlIEdsb2JhbCBFQ0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0x -NzA4MjMxOTM2NDNaFw00MjA4MjMxOTM2NDNaMIGRMQswCQYDVQQGEwJVUzERMA8G -A1UECBMISWxsaW5vaXMxEDAOBgNVBAcTB0NoaWNhZ28xITAfBgNVBAoTGFRydXN0 -d2F2ZSBIb2xkaW5ncywgSW5jLjE6MDgGA1UEAxMxVHJ1c3R3YXZlIEdsb2JhbCBF -Q0MgUDM4NCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTB2MBAGByqGSM49AgEGBSuB -BAAiA2IABGvaDXU1CDFHBa5FmVXxERMuSvgQMSOjfoPTfygIOiYaOs+Xgh+AtycJ -j9GOMMQKmw6sWASr9zZ9lCOkmwqKi6vr/TklZvFe/oyujUF5nQlgziip04pt89ZF -1PKYhDhloKNDMEEwDwYDVR0TAQH/BAUwAwEB/zAPBgNVHQ8BAf8EBQMDBwYAMB0G -A1UdDgQWBBRVqYSJ0sEyvRjLbKYHTsjnnb6CkDAKBggqhkjOPQQDAwNnADBkAjA3 -AZKXRRJ+oPM+rRk6ct30UJMDEr5E0k9BpIycnR+j9sKS50gU/k6bpZFXrsY3crsC -MGclCrEMXu6pY5Jv5ZAL/mYiykf9ijH3g/56vxC+GCsej/YpHpRZ744hN8tRmKVu -Sw== ------END CERTIFICATE----- - -# Issuer: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. -# Subject: CN=NAVER Global Root Certification Authority O=NAVER BUSINESS PLATFORM Corp. 
-# Label: "NAVER Global Root Certification Authority" -# Serial: 9013692873798656336226253319739695165984492813 -# MD5 Fingerprint: c8:7e:41:f6:25:3b:f5:09:b3:17:e8:46:3d:bf:d0:9b -# SHA1 Fingerprint: 8f:6b:f2:a9:27:4a:da:14:a0:c4:f4:8e:61:27:f9:c0:1e:78:5d:d1 -# SHA256 Fingerprint: 88:f4:38:dc:f8:ff:d1:fa:8f:42:91:15:ff:e5:f8:2a:e1:e0:6e:0c:70:c3:75:fa:ad:71:7b:34:a4:9e:72:65 ------BEGIN CERTIFICATE----- -MIIFojCCA4qgAwIBAgIUAZQwHqIL3fXFMyqxQ0Rx+NZQTQ0wDQYJKoZIhvcNAQEM -BQAwaTELMAkGA1UEBhMCS1IxJjAkBgNVBAoMHU5BVkVSIEJVU0lORVNTIFBMQVRG -T1JNIENvcnAuMTIwMAYDVQQDDClOQVZFUiBHbG9iYWwgUm9vdCBDZXJ0aWZpY2F0 -aW9uIEF1dGhvcml0eTAeFw0xNzA4MTgwODU4NDJaFw0zNzA4MTgyMzU5NTlaMGkx -CzAJBgNVBAYTAktSMSYwJAYDVQQKDB1OQVZFUiBCVVNJTkVTUyBQTEFURk9STSBD -b3JwLjEyMDAGA1UEAwwpTkFWRVIgR2xvYmFsIFJvb3QgQ2VydGlmaWNhdGlvbiBB -dXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC21PGTXLVA -iQqrDZBbUGOukJR0F0Vy1ntlWilLp1agS7gvQnXp2XskWjFlqxcX0TM62RHcQDaH -38dq6SZeWYp34+hInDEW+j6RscrJo+KfziFTowI2MMtSAuXaMl3Dxeb57hHHi8lE -HoSTGEq0n+USZGnQJoViAbbJAh2+g1G7XNr4rRVqmfeSVPc0W+m/6imBEtRTkZaz -kVrd/pBzKPswRrXKCAfHcXLJZtM0l/aM9BhK4dA9WkW2aacp+yPOiNgSnABIqKYP -szuSjXEOdMWLyEz59JuOuDxp7W87UC9Y7cSw0BwbagzivESq2M0UXZR4Yb8Obtoq -vC8MC3GmsxY/nOb5zJ9TNeIDoKAYv7vxvvTWjIcNQvcGufFt7QSUqP620wbGQGHf -nZ3zVHbOUzoBppJB7ASjjw2i1QnK1sua8e9DXcCrpUHPXFNwcMmIpi3Ua2FzUCaG -YQ5fG8Ir4ozVu53BA0K6lNpfqbDKzE0K70dpAy8i+/Eozr9dUGWokG2zdLAIx6yo -0es+nPxdGoMuK8u180SdOqcXYZaicdNwlhVNt0xz7hlcxVs+Qf6sdWA7G2POAN3a -CJBitOUt7kinaxeZVL6HSuOpXgRM6xBtVNbv8ejyYhbLgGvtPe31HzClrkvJE+2K -AQHJuFFYwGY6sWZLxNUxAmLpdIQM201GLQIDAQABo0IwQDAdBgNVHQ4EFgQU0p+I -36HNLL3s9TsBAZMzJ7LrYEswDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMB -Af8wDQYJKoZIhvcNAQEMBQADggIBADLKgLOdPVQG3dLSLvCkASELZ0jKbY7gyKoN -qo0hV4/GPnrK21HUUrPUloSlWGB/5QuOH/XcChWB5Tu2tyIvCZwTFrFsDDUIbatj -cu3cvuzHV+YwIHHW1xDBE1UBjCpD5EHxzzp6U5LOogMFDTjfArsQLtk70pt6wKGm -+LUx5vR1yblTmXVHIloUFcd4G7ad6Qz4G3bxhYTeodoS76TiEJd6eN4MUZeoIUCL -hr0N8F5OSza7OyAfikJW4Qsav3vQIkMsRIz75Sq0bBwcupTgE34h5prCy8VCZLQe -lHsIJchxzIdFV4XTnyliIoNRlwAYl3dqmJLJfGBs32x9SuRwTMKeuB330DTHD8z7 -p/8Dvq1wkNoL3chtl1+afwkyQf3NosxabUzyqkn+Zvjp2DXrDige7kgvOtB5CTh8 -piKCk5XQA76+AqAF3SAi428diDRgxuYKuQl1C/AH6GmWNcf7I4GOODm4RStDeKLR -LBT/DShycpWbXgnbiUSYqqFJu3FS8r/2/yehNq+4tneI3TqkbZs0kNwUXTC/t+sX -5Ie3cdCh13cV1ELX8vMxmV2b3RZtP+oGI/hGoiLtk/bdmuYqh7GYVPEi92tF4+KO -dh2ajcQGjTa3FPOdVGm3jjzVpG2Tgbet9r1ke8LJaDmgkpzNNIaRkPpkUZ3+/uul -9XXeifdy ------END CERTIFICATE----- - -# Issuer: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Subject: CN=AC RAIZ FNMT-RCM SERVIDORES SEGUROS O=FNMT-RCM OU=Ceres -# Label: "AC RAIZ FNMT-RCM SERVIDORES SEGUROS" -# Serial: 131542671362353147877283741781055151509 -# MD5 Fingerprint: 19:36:9c:52:03:2f:d2:d1:bb:23:cc:dd:1e:12:55:bb -# SHA1 Fingerprint: 62:ff:d9:9e:c0:65:0d:03:ce:75:93:d2:ed:3f:2d:32:c9:e3:e5:4a -# SHA256 Fingerprint: 55:41:53:b1:3d:2c:f9:dd:b7:53:bf:be:1a:4e:0a:e0:8d:0a:a4:18:70:58:fe:60:a2:b8:62:b2:e4:b8:7b:cb ------BEGIN CERTIFICATE----- -MIICbjCCAfOgAwIBAgIQYvYybOXE42hcG2LdnC6dlTAKBggqhkjOPQQDAzB4MQsw -CQYDVQQGEwJFUzERMA8GA1UECgwIRk5NVC1SQ00xDjAMBgNVBAsMBUNlcmVzMRgw -FgYDVQRhDA9WQVRFUy1RMjgyNjAwNEoxLDAqBgNVBAMMI0FDIFJBSVogRk5NVC1S -Q00gU0VSVklET1JFUyBTRUdVUk9TMB4XDTE4MTIyMDA5MzczM1oXDTQzMTIyMDA5 -MzczM1oweDELMAkGA1UEBhMCRVMxETAPBgNVBAoMCEZOTVQtUkNNMQ4wDAYDVQQL -DAVDZXJlczEYMBYGA1UEYQwPVkFURVMtUTI4MjYwMDRKMSwwKgYDVQQDDCNBQyBS -QUlaIEZOTVQtUkNNIFNFUlZJRE9SRVMgU0VHVVJPUzB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPa6V1PIyqvfNkpSIeSX0oNnnvBlUdBeh8dHsVnyV0ebAAKTRBdp20LH 
-sbI6GA60XYyzZl2hNPk2LEnb80b8s0RpRBNm/dfF/a82Tc4DTQdxz69qBdKiQ1oK -Um8BA06Oi6NCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD -VR0OBBYEFAG5L++/EYZg8k/QQW6rcx/n0m5JMAoGCCqGSM49BAMDA2kAMGYCMQCu -SuMrQMN0EfKVrRYj3k4MGuZdpSRea0R7/DjiT8ucRRcRTBQnJlU5dUoDzBOQn5IC -MQD6SmxgiHPz7riYYqnOK8LZiqZwMR2vsJRM60/G49HzYqc8/5MuB1xJAWdpEgJy -v+c= ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root R46 O=GlobalSign nv-sa -# Label: "GlobalSign Root R46" -# Serial: 1552617688466950547958867513931858518042577 -# MD5 Fingerprint: c4:14:30:e4:fa:66:43:94:2a:6a:1b:24:5f:19:d0:ef -# SHA1 Fingerprint: 53:a2:b0:4b:ca:6b:d6:45:e6:39:8a:8e:c4:0d:d2:bf:77:c3:a2:90 -# SHA256 Fingerprint: 4f:a3:12:6d:8d:3a:11:d1:c4:85:5a:4f:80:7c:ba:d6:cf:91:9d:3a:5a:88:b0:3b:ea:2c:63:72:d9:3c:40:c9 ------BEGIN CERTIFICATE----- -MIIFWjCCA0KgAwIBAgISEdK7udcjGJ5AXwqdLdDfJWfRMA0GCSqGSIb3DQEBDAUA -MEYxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYD -VQQDExNHbG9iYWxTaWduIFJvb3QgUjQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMy -MDAwMDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYt -c2ExHDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCsrHQy6LNl5brtQyYdpokNRbopiLKkHWPd08EsCVeJ -OaFV6Wc0dwxu5FUdUiXSE2te4R2pt32JMl8Nnp8semNgQB+msLZ4j5lUlghYruQG -vGIFAha/r6gjA7aUD7xubMLL1aa7DOn2wQL7Id5m3RerdELv8HQvJfTqa1VbkNud -316HCkD7rRlr+/fKYIje2sGP1q7Vf9Q8g+7XFkyDRTNrJ9CG0Bwta/OrffGFqfUo -0q3v84RLHIf8E6M6cqJaESvWJ3En7YEtbWaBkoe0G1h6zD8K+kZPTXhc+CtI4wSE -y132tGqzZfxCnlEmIyDLPRT5ge1lFgBPGmSXZgjPjHvjK8Cd+RTyG/FWaha/LIWF -zXg4mutCagI0GIMXTpRW+LaCtfOW3T3zvn8gdz57GSNrLNRyc0NXfeD412lPFzYE -+cCQYDdF3uYM2HSNrpyibXRdQr4G9dlkbgIQrImwTDsHTUB+JMWKmIJ5jqSngiCN -I/onccnfxkF0oE32kRbcRoxfKWMxWXEM2G/CtjJ9++ZdU6Z+Ffy7dXxd7Pj2Fxzs -x2sZy/N78CsHpdlseVR2bJ0cpm4O6XkMqCNqo98bMDGfsVR7/mrLZqrcZdCinkqa -ByFrgY/bxFn63iLABJzjqls2k+g9vXqhnQt2sQvHnf3PmKgGwvgqo6GDoLclcqUC -4wIDAQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQUA1yrc4GHqMywptWU4jaWSf8FmSwwDQYJKoZIhvcNAQEMBQADggIBAHx4 -7PYCLLtbfpIrXTncvtgdokIzTfnvpCo7RGkerNlFo048p9gkUbJUHJNOxO97k4Vg -JuoJSOD1u8fpaNK7ajFxzHmuEajwmf3lH7wvqMxX63bEIaZHU1VNaL8FpO7XJqti -2kM3S+LGteWygxk6x9PbTZ4IevPuzz5i+6zoYMzRx6Fcg0XERczzF2sUyQQCPtIk -pnnpHs6i58FZFZ8d4kuaPp92CC1r2LpXFNqD6v6MVenQTqnMdzGxRBF6XLE+0xRF -FRhiJBPSy03OXIPBNvIQtQ6IbbjhVp+J3pZmOUdkLG5NrmJ7v2B0GbhWrJKsFjLt -rWhV/pi60zTe9Mlhww6G9kuEYO4Ne7UyWHmRVSyBQ7N0H3qqJZ4d16GLuc1CLgSk -ZoNNiTW2bKg2SnkheCLQQrzRQDGQob4Ez8pn7fXwgNNgyYMqIgXQBztSvwyeqiv5 -u+YfjyW6hY0XHgL+XVAEV8/+LbzvXMAaq7afJMbfc2hIkCwU9D9SGuTSyxTDYWnP -4vkYxboznxSjBF25cfe1lNj2M8FawTSLfJvdkzrnE6JwYZ+vj+vYxXX4M2bUdGc6 -N3ec592kD3ZDZopD8p/7DEJ4Y9HiD2971KE9dJeFt0g5QdYg/NA6s/rob8SKunE3 -vouXsXgxT7PntgMTzlSdriVZzH81Xwj3QEUxeCp6 ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Subject: CN=GlobalSign Root E46 O=GlobalSign nv-sa -# Label: "GlobalSign Root E46" -# Serial: 1552617690338932563915843282459653771421763 -# MD5 Fingerprint: b5:b8:66:ed:de:08:83:e3:c9:e2:01:34:06:ac:51:6f -# SHA1 Fingerprint: 39:b4:6c:d5:fe:80:06:eb:e2:2f:4a:bb:08:33:a0:af:db:b9:dd:84 -# SHA256 Fingerprint: cb:b9:c4:4d:84:b8:04:3e:10:50:ea:31:a6:9f:51:49:55:d7:bf:d2:e2:c6:b4:93:01:01:9a:d6:1d:9f:50:58 ------BEGIN CERTIFICATE----- -MIICCzCCAZGgAwIBAgISEdK7ujNu1LzmJGjFDYQdmOhDMAoGCCqGSM49BAMDMEYx -CzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRwwGgYDVQQD -ExNHbG9iYWxTaWduIFJvb3QgRTQ2MB4XDTE5MDMyMDAwMDAwMFoXDTQ2MDMyMDAw -MDAwMFowRjELMAkGA1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2Ex 
-HDAaBgNVBAMTE0dsb2JhbFNpZ24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAScDrHPt+ieUnd1NPqlRqetMhkytAepJ8qUuwzSChDH2omwlwxwEwkBjtjq -R+q+soArzfwoDdusvKSGN+1wCAB16pMLey5SnCNoIwZD7JIvU4Tb+0cUB+hflGdd -yXqBPCCjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1Ud -DgQWBBQxCpCPtsad0kRLgLWi5h+xEk8blTAKBggqhkjOPQQDAwNoADBlAjEA31SQ -7Zvvi5QCkxeCmb6zniz2C5GMn0oUsfZkvLtoURMMA/cVi4RguYv/Uo7njLwcAjA8 -+RHUjE7AwWHCFUyqqx0LMV87HOIAl0Qx5v5zli/altP+CAezNIm8BZ/3Hobui3A= ------END CERTIFICATE----- - -# Issuer: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Subject: CN=ANF Secure Server Root CA O=ANF Autoridad de Certificacion OU=ANF CA Raiz -# Label: "ANF Secure Server Root CA" -# Serial: 996390341000653745 -# MD5 Fingerprint: 26:a6:44:5a:d9:af:4e:2f:b2:1d:b6:65:b0:4e:e8:96 -# SHA1 Fingerprint: 5b:6e:68:d0:cc:15:b6:a0:5f:1e:c1:5f:ae:02:fc:6b:2f:5d:6f:74 -# SHA256 Fingerprint: fb:8f:ec:75:91:69:b9:10:6b:1e:51:16:44:c6:18:c5:13:04:37:3f:6c:06:43:08:8d:8b:ef:fd:1b:99:75:99 ------BEGIN CERTIFICATE----- -MIIF7zCCA9egAwIBAgIIDdPjvGz5a7EwDQYJKoZIhvcNAQELBQAwgYQxEjAQBgNV -BAUTCUc2MzI4NzUxMDELMAkGA1UEBhMCRVMxJzAlBgNVBAoTHkFORiBBdXRvcmlk -YWQgZGUgQ2VydGlmaWNhY2lvbjEUMBIGA1UECxMLQU5GIENBIFJhaXoxIjAgBgNV -BAMTGUFORiBTZWN1cmUgU2VydmVyIFJvb3QgQ0EwHhcNMTkwOTA0MTAwMDM4WhcN -MzkwODMwMTAwMDM4WjCBhDESMBAGA1UEBRMJRzYzMjg3NTEwMQswCQYDVQQGEwJF -UzEnMCUGA1UEChMeQU5GIEF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uMRQwEgYD -VQQLEwtBTkYgQ0EgUmFpejEiMCAGA1UEAxMZQU5GIFNlY3VyZSBTZXJ2ZXIgUm9v -dCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANvrayvmZFSVgpCj -cqQZAZ2cC4Ffc0m6p6zzBE57lgvsEeBbphzOG9INgxwruJ4dfkUyYA8H6XdYfp9q -yGFOtibBTI3/TO80sh9l2Ll49a2pcbnvT1gdpd50IJeh7WhM3pIXS7yr/2WanvtH -2Vdy8wmhrnZEE26cLUQ5vPnHO6RYPUG9tMJJo8gN0pcvB2VSAKduyK9o7PQUlrZX -H1bDOZ8rbeTzPvY1ZNoMHKGESy9LS+IsJJ1tk0DrtSOOMspvRdOoiXsezx76W0OL -zc2oD2rKDF65nkeP8Nm2CgtYZRczuSPkdxl9y0oukntPLxB3sY0vaJxizOBQ+OyR -p1RMVwnVdmPF6GUe7m1qzwmd+nxPrWAI/VaZDxUse6mAq4xhj0oHdkLePfTdsiQz -W7i1o0TJrH93PB0j7IKppuLIBkwC/qxcmZkLLxCKpvR/1Yd0DVlJRfbwcVw5Kda/ -SiOL9V8BY9KHcyi1Swr1+KuCLH5zJTIdC2MKF4EA/7Z2Xue0sUDKIbvVgFHlSFJn -LNJhiQcND85Cd8BEc5xEUKDbEAotlRyBr+Qc5RQe8TZBAQIvfXOn3kLMTOmJDVb3 -n5HUA8ZsyY/b2BzgQJhdZpmYgG4t/wHFzstGH6wCxkPmrqKEPMVOHj1tyRRM4y5B -u8o5vzY8KhmqQYdOpc5LMnndkEl/AgMBAAGjYzBhMB8GA1UdIwQYMBaAFJxf0Gxj -o1+TypOYCK2Mh6UsXME3MB0GA1UdDgQWBBScX9BsY6Nfk8qTmAitjIelLFzBNzAO -BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG9w0BAQsFAAOC -AgEATh65isagmD9uw2nAalxJUqzLK114OMHVVISfk/CHGT0sZonrDUL8zPB1hT+L -9IBdeeUXZ701guLyPI59WzbLWoAAKfLOKyzxj6ptBZNscsdW699QIyjlRRA96Gej -rw5VD5AJYu9LWaL2U/HANeQvwSS9eS9OICI7/RogsKQOLHDtdD+4E5UGUcjohybK -pFtqFiGS3XNgnhAY3jyB6ugYw3yJ8otQPr0R4hUDqDZ9MwFsSBXXiJCZBMXM5gf0 -vPSQ7RPi6ovDj6MzD8EpTBNO2hVWcXNyglD2mjN8orGoGjR0ZVzO0eurU+AagNjq -OknkJjCb5RyKqKkVMoaZkgoQI1YS4PbOTOK7vtuNknMBZi9iPrJyJ0U27U1W45eZ -/zo1PqVUSlJZS2Db7v54EX9K3BR5YLZrZAPbFYPhor72I5dQ8AkzNqdxliXzuUJ9 -2zg/LFis6ELhDtjTO0wugumDLmsx2d1Hhk9tl5EuT+IocTUW0fJz/iUrB0ckYyfI -+PbZa/wSMVYIwFNCr5zQM378BvAxRAMU8Vjq8moNqRGyg77FGr8H6lnco4g175x2 -MjxNBiLOFeXdntiP2t7SxDnlF4HPOEfrf4htWRvfn0IUrn7PqLBmZdo3r5+qPeoo -tt7VMVgWglvquxl1AnMaykgaIZOQCo6ThKd9OyMYkomgjaw= ------END CERTIFICATE----- - -# Issuer: CN=Certum EC-384 CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum EC-384 CA O=Asseco Data Systems S.A. 
OU=Certum Certification Authority -# Label: "Certum EC-384 CA" -# Serial: 160250656287871593594747141429395092468 -# MD5 Fingerprint: b6:65:b3:96:60:97:12:a1:ec:4e:e1:3d:a3:c6:c9:f1 -# SHA1 Fingerprint: f3:3e:78:3c:ac:df:f4:a2:cc:ac:67:55:69:56:d7:e5:16:3c:e1:ed -# SHA256 Fingerprint: 6b:32:80:85:62:53:18:aa:50:d1:73:c9:8d:8b:da:09:d5:7e:27:41:3d:11:4c:f7:87:a0:f5:d0:6c:03:0c:f6 ------BEGIN CERTIFICATE----- -MIICZTCCAeugAwIBAgIQeI8nXIESUiClBNAt3bpz9DAKBggqhkjOPQQDAzB0MQsw -CQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEuMScw -JQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAXBgNVBAMT -EENlcnR1bSBFQy0zODQgQ0EwHhcNMTgwMzI2MDcyNDU0WhcNNDMwMzI2MDcyNDU0 -WjB0MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBT -LkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxGTAX -BgNVBAMTEENlcnR1bSBFQy0zODQgQ0EwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATE -KI6rGFtqvm5kN2PkzeyrOvfMobgOgknXhimfoZTy42B4mIF4Bk3y7JoOV2CDn7Tm -Fy8as10CW4kjPMIRBSqniBMY81CE1700LCeJVf/OTOffph8oxPBUw7l8t1Ot68Kj -QjBAMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI0GZnQkdjrzife81r1HfS+8 -EF9LMA4GA1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNoADBlAjADVS2m5hjEfO/J -UG7BJw+ch69u1RsIGL2SKcHvlJF40jocVYli5RsJHrpka/F2tNQCMQC0QoSZ/6vn -nvuRlydd3LBbMHHOXjgaatkl5+r3YZJW+OraNsKHZZYuciUvf9/DE8k= ------END CERTIFICATE----- - -# Issuer: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Subject: CN=Certum Trusted Root CA O=Asseco Data Systems S.A. OU=Certum Certification Authority -# Label: "Certum Trusted Root CA" -# Serial: 40870380103424195783807378461123655149 -# MD5 Fingerprint: 51:e1:c2:e7:fe:4c:84:af:59:0e:2f:f4:54:6f:ea:29 -# SHA1 Fingerprint: c8:83:44:c0:18:ae:9f:cc:f1:87:b7:8f:22:d1:c5:d7:45:84:ba:e5 -# SHA256 Fingerprint: fe:76:96:57:38:55:77:3e:37:a9:5e:7a:d4:d9:cc:96:c3:01:57:c1:5d:31:76:5b:a9:b1:57:04:e1:ae:78:fd ------BEGIN CERTIFICATE----- -MIIFwDCCA6igAwIBAgIQHr9ZULjJgDdMBvfrVU+17TANBgkqhkiG9w0BAQ0FADB6 -MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEgU3lzdGVtcyBTLkEu -MScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkxHzAdBgNV -BAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwHhcNMTgwMzE2MTIxMDEzWhcNNDMw -MzE2MTIxMDEzWjB6MQswCQYDVQQGEwJQTDEhMB8GA1UEChMYQXNzZWNvIERhdGEg -U3lzdGVtcyBTLkEuMScwJQYDVQQLEx5DZXJ0dW0gQ2VydGlmaWNhdGlvbiBBdXRo -b3JpdHkxHzAdBgNVBAMTFkNlcnR1bSBUcnVzdGVkIFJvb3QgQ0EwggIiMA0GCSqG -SIb3DQEBAQUAA4ICDwAwggIKAoICAQDRLY67tzbqbTeRn06TpwXkKQMlzhyC93yZ -n0EGze2jusDbCSzBfN8pfktlL5On1AFrAygYo9idBcEq2EXxkd7fO9CAAozPOA/q -p1x4EaTByIVcJdPTsuclzxFUl6s1wB52HO8AU5853BSlLCIls3Jy/I2z5T4IHhQq -NwuIPMqw9MjCoa68wb4pZ1Xi/K1ZXP69VyywkI3C7Te2fJmItdUDmj0VDT06qKhF -8JVOJVkdzZhpu9PMMsmN74H+rX2Ju7pgE8pllWeg8xn2A1bUatMn4qGtg/BKEiJ3 -HAVz4hlxQsDsdUaakFjgao4rpUYwBI4Zshfjvqm6f1bxJAPXsiEodg42MEx51UGa -mqi4NboMOvJEGyCI98Ul1z3G4z5D3Yf+xOr1Uz5MZf87Sst4WmsXXw3Hw09Omiqi -7VdNIuJGmj8PkTQkfVXjjJU30xrwCSss0smNtA0Aq2cpKNgB9RkEth2+dv5yXMSF -ytKAQd8FqKPVhJBPC/PgP5sZ0jeJP/J7UhyM9uH3PAeXjA6iWYEMspA90+NZRu0P -qafegGtaqge2Gcu8V/OXIXoMsSt0Puvap2ctTMSYnjYJdmZm/Bo/6khUHL4wvYBQ -v3y1zgD2DGHZ5yQD4OMBgQ692IU0iL2yNqh7XAjlRICMb/gv1SHKHRzQ+8S1h9E6 -Tsd2tTVItQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBSM+xx1 -vALTn04uSNn5YFSqxLNP+jAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQENBQAD -ggIBAEii1QALLtA/vBzVtVRJHlpr9OTy4EA34MwUe7nJ+jW1dReTagVphZzNTxl4 -WxmB82M+w85bj/UvXgF2Ez8sALnNllI5SW0ETsXpD4YN4fqzX4IS8TrOZgYkNCvo -zMrnadyHncI013nR03e4qllY/p0m+jiGPp2Kh2RX5Rc64vmNueMzeMGQ2Ljdt4NR -5MTMI9UGfOZR0800McD2RrsLrfw9EAUqO0qRJe6M1ISHgCq8CYyqOhNf6DR5UMEQ -GfnTKB7U0VEwKbOukGfWHwpjscWpxkIxYxeU72nLL/qMFH3EQxiJ2fAyQOaA4kZf 
-5ePBAFmo+eggvIksDkc0C+pXwlM2/KfUrzHN/gLldfq5Jwn58/U7yn2fqSLLiMmq -0Uc9NneoWWRrJ8/vJ8HjJLWG965+Mk2weWjROeiQWMODvA8s1pfrzgzhIMfatz7D -P78v3DSk+yshzWePS/Tj6tQ/50+6uaWTRRxmHyH6ZF5v4HaUMst19W7l9o/HuKTM -qJZ9ZPskWkoDbGs4xugDQ5r3V7mzKWmTOPQD8rv7gmsHINFSH5pkAnuYZttcTVoP -0ISVoDwUQwbKytu4QTbaakRnh6+v40URFWkIsr4WOZckbxJF0WddCajJFdr60qZf -E2Efv4WstK2tBZQIgx51F9NxO5NQI1mg7TyRVJ12AMXDuDjb ------END CERTIFICATE----- - -# Issuer: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Subject: CN=TunTrust Root CA O=Agence Nationale de Certification Electronique -# Label: "TunTrust Root CA" -# Serial: 108534058042236574382096126452369648152337120275 -# MD5 Fingerprint: 85:13:b9:90:5b:36:5c:b6:5e:b8:5a:f8:e0:31:57:b4 -# SHA1 Fingerprint: cf:e9:70:84:0f:e0:73:0f:9d:f6:0c:7f:2c:4b:ee:20:46:34:9c:bb -# SHA256 Fingerprint: 2e:44:10:2a:b5:8c:b8:54:19:45:1c:8e:19:d9:ac:f3:66:2c:af:bc:61:4b:6a:53:96:0a:30:f7:d0:e2:eb:41 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIUEwLV4kBMkkaGFmddtLu7sms+/BMwDQYJKoZIhvcNAQEL -BQAwYTELMAkGA1UEBhMCVE4xNzA1BgNVBAoMLkFnZW5jZSBOYXRpb25hbGUgZGUg -Q2VydGlmaWNhdGlvbiBFbGVjdHJvbmlxdWUxGTAXBgNVBAMMEFR1blRydXN0IFJv -b3QgQ0EwHhcNMTkwNDI2MDg1NzU2WhcNNDQwNDI2MDg1NzU2WjBhMQswCQYDVQQG -EwJUTjE3MDUGA1UECgwuQWdlbmNlIE5hdGlvbmFsZSBkZSBDZXJ0aWZpY2F0aW9u -IEVsZWN0cm9uaXF1ZTEZMBcGA1UEAwwQVHVuVHJ1c3QgUm9vdCBDQTCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAMPN0/y9BFPdDCA61YguBUtB9YOCfvdZ -n56eY+hz2vYGqU8ftPkLHzmMmiDQfgbU7DTZhrx1W4eI8NLZ1KMKsmwb60ksPqxd -2JQDoOw05TDENX37Jk0bbjBU2PWARZw5rZzJJQRNmpA+TkBuimvNKWfGzC3gdOgF -VwpIUPp6Q9p+7FuaDmJ2/uqdHYVy7BG7NegfJ7/Boce7SBbdVtfMTqDhuazb1YMZ -GoXRlJfXyqNlC/M4+QKu3fZnz8k/9YosRxqZbwUN/dAdgjH8KcwAWJeRTIAAHDOF -li/LQcKLEITDCSSJH7UP2dl3RxiSlGBcx5kDPP73lad9UKGAwqmDrViWVSHbhlnU -r8a83YFuB9tgYv7sEG7aaAH0gxupPqJbI9dkxt/con3YS7qC0lH4Zr8GRuR5KiY2 -eY8fTpkdso8MDhz/yV3A/ZAQprE38806JG60hZC/gLkMjNWb1sjxVj8agIl6qeIb -MlEsPvLfe/ZdeikZjuXIvTZxi11Mwh0/rViizz1wTaZQmCXcI/m4WEEIcb9PuISg -jwBUFfyRbVinljvrS5YnzWuioYasDXxU5mZMZl+QviGaAkYt5IPCgLnPSz7ofzwB -7I9ezX/SKEIBlYrilz0QIX32nRzFNKHsLA4KUiwSVXAkPcvCFDVDXSdOvsC9qnyW -5/yeYa1E0wCXAgMBAAGjYzBhMB0GA1UdDgQWBBQGmpsfU33x9aTI04Y+oXNZtPdE -ITAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFAaamx9TffH1pMjThj6hc1m0 -90QhMA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAqgVutt0Vyb+z -xiD2BkewhpMl0425yAA/l/VSJ4hxyXT968pk21vvHl26v9Hr7lxpuhbI87mP0zYu -QEkHDVneixCwSQXi/5E/S7fdAo74gShczNxtr18UnH1YeA32gAm56Q6XKRm4t+v4 -FstVEuTGfbvE7Pi1HE4+Z7/FXxttbUcoqgRYYdZ2vyJ/0Adqp2RT8JeNnYA/u8EH -22Wv5psymsNUk8QcCMNE+3tjEUPRahphanltkE8pjkcFwRJpadbGNjHh/PqAulxP -xOu3Mqz4dWEX1xAZufHSCe96Qp1bWgvUxpVOKs7/B9dPfhgGiPEZtdmYu65xxBzn -dFlY7wyJz4sfdZMaBBSSSFCp61cpABbjNhzI+L/wM9VBD8TMPN3pM0MBkRArHtG5 -Xc0yGYuPjCB31yLEQtyEFpslbei0VXF/sHyz03FJuc9SpAQ/3D2gu68zngowYI7b -nV2UqL1g52KAdoGDDIzMMEZJ4gzSqK/rYXHv5yJiqfdcZGyfFoxnNidF9Ql7v/YQ -CvGwjVRDjAS6oz/v4jXH+XTgbzRB0L9zZVcg+ZtnemZoJE6AZb0QmQZZ8mWvuMZH -u/2QeItBcy6vVR/cO5JyboTT0GFMDcx2V+IthSIVNg3rAZ3r2OvEhJn7wAzMMujj -d9qDRIueVSjAi1jTkD5OGwDxFa2DK5o= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS RSA Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS RSA Root CA 2021" -# Serial: 76817823531813593706434026085292783742 -# MD5 Fingerprint: 65:47:9b:58:86:dd:2c:f0:fc:a2:84:1f:1e:96:c4:91 -# SHA1 Fingerprint: 02:2d:05:82:fa:88:ce:14:0c:06:79:de:7f:14:10:e9:45:d7:a5:6d -# SHA256 Fingerprint: d9:5d:0e:8e:da:79:52:5b:f9:be:b1:1b:14:d2:10:0d:32:94:98:5f:0c:62:d9:fa:bd:9c:d9:99:ec:cb:7b:1d ------BEGIN 
CERTIFICATE----- -MIIFpDCCA4ygAwIBAgIQOcqTHO9D88aOk8f0ZIk4fjANBgkqhkiG9w0BAQsFADBs -MQswCQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJl -c2VhcmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBSU0Eg -Um9vdCBDQSAyMDIxMB4XDTIxMDIxOTEwNTUzOFoXDTQ1MDIxMzEwNTUzN1owbDEL -MAkGA1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNl -YXJjaCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgUlNBIFJv -b3QgQ0EgMjAyMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAIvC569l -mwVnlskNJLnQDmT8zuIkGCyEf3dRywQRNrhe7Wlxp57kJQmXZ8FHws+RFjZiPTgE -4VGC/6zStGndLuwRo0Xua2s7TL+MjaQenRG56Tj5eg4MmOIjHdFOY9TnuEFE+2uv -a9of08WRiFukiZLRgeaMOVig1mlDqa2YUlhu2wr7a89o+uOkXjpFc5gH6l8Cct4M -pbOfrqkdtx2z/IpZ525yZa31MJQjB/OCFks1mJxTuy/K5FrZx40d/JiZ+yykgmvw -Kh+OC19xXFyuQnspiYHLA6OZyoieC0AJQTPb5lh6/a6ZcMBaD9YThnEvdmn8kN3b -LW7R8pv1GmuebxWMevBLKKAiOIAkbDakO/IwkfN4E8/BPzWr8R0RI7VDIp4BkrcY -AuUR0YLbFQDMYTfBKnya4dC6s1BG7oKsnTH4+yPiAwBIcKMJJnkVU2DzOFytOOqB -AGMUuTNe3QvboEUHGjMJ+E20pwKmafTCWQWIZYVWrkvL4N48fS0ayOn7H6NhStYq -E613TBoYm5EPWNgGVMWX+Ko/IIqmhaZ39qb8HOLubpQzKoNQhArlT4b4UEV4AIHr -W2jjJo3Me1xR9BQsQL4aYB16cmEdH2MtiKrOokWQCPxrvrNQKlr9qEgYRtaQQJKQ -CoReaDH46+0N0x3GfZkYVVYnZS6NRcUk7M7jAgMBAAGjQjBAMA8GA1UdEwEB/wQF -MAMBAf8wHQYDVR0OBBYEFApII6ZgpJIKM+qTW8VX6iVNvRLuMA4GA1UdDwEB/wQE -AwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAPpBIqm5iFSVmewzVjIuJndftTgfvnNAU -X15QvWiWkKQUEapobQk1OUAJ2vQJLDSle1mESSmXdMgHHkdt8s4cUCbjnj1AUz/3 -f5Z2EMVGpdAgS1D0NTsY9FVqQRtHBmg8uwkIYtlfVUKqrFOFrJVWNlar5AWMxaja -H6NpvVMPxP/cyuN+8kyIhkdGGvMA9YCRotxDQpSbIPDRzbLrLFPCU3hKTwSUQZqP -JzLB5UkZv/HywouoCjkxKLR9YjYsTewfM7Z+d21+UPCfDtcRj88YxeMn/ibvBZ3P -zzfF0HvaO7AWhAw6k9a+F9sPPg4ZeAnHqQJyIkv3N3a6dcSFA1pj1bF1BcK5vZSt -jBWZp5N99sXzqnTPBIWUmAD04vnKJGW/4GKvyMX6ssmeVkjaef2WdhW+o45WxLM0 -/L5H9MG0qPzVMIho7suuyWPEdr6sOBjhXlzPrjoiUevRi7PzKzMHVIf6tLITe7pT -BGIBnfHAT+7hOtSLIBD6Alfm78ELt5BGnBkpjNxvoEppaZS3JGWg/6w/zgH7IS79 -aPib8qXPMThcFarmlwDB31qlpzmq6YR/PFGoOtmUW4y/Twhx5duoXNTSpv4Ao8YW -xw/ogM4cKGR0GQjTQuPOAF1/sdwTsOEFy9EgqoZ0njnnkf3/W9b3raYvAwtt41dU -63ZTGI0RmLo= ------END CERTIFICATE----- - -# Issuer: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Subject: CN=HARICA TLS ECC Root CA 2021 O=Hellenic Academic and Research Institutions CA -# Label: "HARICA TLS ECC Root CA 2021" -# Serial: 137515985548005187474074462014555733966 -# MD5 Fingerprint: ae:f7:4c:e5:66:35:d1:b7:9b:8c:22:93:74:d3:4b:b0 -# SHA1 Fingerprint: bc:b0:c1:9d:e9:98:92:70:19:38:57:e9:8d:a7:b4:5d:6e:ee:01:48 -# SHA256 Fingerprint: 3f:99:cc:47:4a:cf:ce:4d:fe:d5:87:94:66:5e:47:8d:15:47:73:9f:2e:78:0f:1b:b4:ca:9b:13:30:97:d4:01 ------BEGIN CERTIFICATE----- -MIICVDCCAdugAwIBAgIQZ3SdjXfYO2rbIvT/WeK/zjAKBggqhkjOPQQDAzBsMQsw -CQYDVQQGEwJHUjE3MDUGA1UECgwuSGVsbGVuaWMgQWNhZGVtaWMgYW5kIFJlc2Vh -cmNoIEluc3RpdHV0aW9ucyBDQTEkMCIGA1UEAwwbSEFSSUNBIFRMUyBFQ0MgUm9v -dCBDQSAyMDIxMB4XDTIxMDIxOTExMDExMFoXDTQ1MDIxMzExMDEwOVowbDELMAkG -A1UEBhMCR1IxNzA1BgNVBAoMLkhlbGxlbmljIEFjYWRlbWljIGFuZCBSZXNlYXJj -aCBJbnN0aXR1dGlvbnMgQ0ExJDAiBgNVBAMMG0hBUklDQSBUTFMgRUNDIFJvb3Qg -Q0EgMjAyMTB2MBAGByqGSM49AgEGBSuBBAAiA2IABDgI/rGgltJ6rK9JOtDA4MM7 -KKrxcm1lAEeIhPyaJmuqS7psBAqIXhfyVYf8MLA04jRYVxqEU+kw2anylnTDUR9Y -STHMmE5gEYd103KUkE+bECUqqHgtvpBBWJAVcqeht6NCMEAwDwYDVR0TAQH/BAUw -AwEB/zAdBgNVHQ4EFgQUyRtTgRL+BNUW0aq8mm+3oJUZbsowDgYDVR0PAQH/BAQD -AgGGMAoGCCqGSM49BAMDA2cAMGQCMBHervjcToiwqfAircJRQO9gcS3ujwLEXQNw -SaSS6sUUiHCm0w2wqsosQJz76YJumgIwK0eaB8bRwoF8yguWGEEbo/QwCZ61IygN -nxS2PFOiTAZpffpskcYqSUXm7LcT4Tps ------END CERTIFICATE----- - -# Issuer: CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Subject: 
CN=Autoridad de Certificacion Firmaprofesional CIF A62634068 -# Label: "Autoridad de Certificacion Firmaprofesional CIF A62634068" -# Serial: 1977337328857672817 -# MD5 Fingerprint: 4e:6e:9b:54:4c:ca:b7:fa:48:e4:90:b1:15:4b:1c:a3 -# SHA1 Fingerprint: 0b:be:c2:27:22:49:cb:39:aa:db:35:5c:53:e3:8c:ae:78:ff:b6:fe -# SHA256 Fingerprint: 57:de:05:83:ef:d2:b2:6e:03:61:da:99:da:9d:f4:64:8d:ef:7e:e8:44:1c:3b:72:8a:fa:9b:cd:e0:f9:b2:6a ------BEGIN CERTIFICATE----- -MIIGFDCCA/ygAwIBAgIIG3Dp0v+ubHEwDQYJKoZIhvcNAQELBQAwUTELMAkGA1UE -BhMCRVMxQjBABgNVBAMMOUF1dG9yaWRhZCBkZSBDZXJ0aWZpY2FjaW9uIEZpcm1h -cHJvZmVzaW9uYWwgQ0lGIEE2MjYzNDA2ODAeFw0xNDA5MjMxNTIyMDdaFw0zNjA1 -MDUxNTIyMDdaMFExCzAJBgNVBAYTAkVTMUIwQAYDVQQDDDlBdXRvcmlkYWQgZGUg -Q2VydGlmaWNhY2lvbiBGaXJtYXByb2Zlc2lvbmFsIENJRiBBNjI2MzQwNjgwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDKlmuO6vj78aI14H9M2uDDUtd9 -thDIAl6zQyrET2qyyhxdKJp4ERppWVevtSBC5IsP5t9bpgOSL/UR5GLXMnE42QQM -cas9UX4PB99jBVzpv5RvwSmCwLTaUbDBPLutN0pcyvFLNg4kq7/DhHf9qFD0sefG -L9ItWY16Ck6WaVICqjaY7Pz6FIMMNx/Jkjd/14Et5cS54D40/mf0PmbR0/RAz15i -NA9wBj4gGFrO93IbJWyTdBSTo3OxDqqHECNZXyAFGUftaI6SEspd/NYrspI8IM/h -X68gvqB2f3bl7BqGYTM+53u0P6APjqK5am+5hyZvQWyIplD9amML9ZMWGxmPsu2b -m8mQ9QEM3xk9Dz44I8kvjwzRAv4bVdZO0I08r0+k8/6vKtMFnXkIoctXMbScyJCy -Z/QYFpM6/EfY0XiWMR+6KwxfXZmtY4laJCB22N/9q06mIqqdXuYnin1oKaPnirja -EbsXLZmdEyRG98Xi2J+Of8ePdG1asuhy9azuJBCtLxTa/y2aRnFHvkLfuwHb9H/T -KI8xWVvTyQKmtFLKbpf7Q8UIJm+K9Lv9nyiqDdVF8xM6HdjAeI9BZzwelGSuewvF -6NkBiDkal4ZkQdU7hwxu+g/GvUgUvzlN1J5Bto+WHWOWk9mVBngxaJ43BjuAiUVh -OSPHG0SjFeUc+JIwuwIDAQABo4HvMIHsMB0GA1UdDgQWBBRlzeurNR4APn7VdMAc -tHNHDhpkLzASBgNVHRMBAf8ECDAGAQH/AgEBMIGmBgNVHSAEgZ4wgZswgZgGBFUd -IAAwgY8wLwYIKwYBBQUHAgEWI2h0dHA6Ly93d3cuZmlybWFwcm9mZXNpb25hbC5j -b20vY3BzMFwGCCsGAQUFBwICMFAeTgBQAGEAcwBlAG8AIABkAGUAIABsAGEAIABC -AG8AbgBhAG4AbwB2AGEAIAA0ADcAIABCAGEAcgBjAGUAbABvAG4AYQAgADAAOAAw -ADEANzAOBgNVHQ8BAf8EBAMCAQYwDQYJKoZIhvcNAQELBQADggIBAHSHKAIrdx9m -iWTtj3QuRhy7qPj4Cx2Dtjqn6EWKB7fgPiDL4QjbEwj4KKE1soCzC1HA01aajTNF -Sa9J8OA9B3pFE1r/yJfY0xgsfZb43aJlQ3CTkBW6kN/oGbDbLIpgD7dvlAceHabJ -hfa9NPhAeGIQcDq+fUs5gakQ1JZBu/hfHAsdCPKxsIl68veg4MSPi3i1O1ilI45P -Vf42O+AMt8oqMEEgtIDNrvx2ZnOorm7hfNoD6JQg5iKj0B+QXSBTFCZX2lSX3xZE -EAEeiGaPcjiT3SC3NL7X8e5jjkd5KAb881lFJWAiMxujX6i6KtoaPc1A6ozuBRWV -1aUsIC+nmCjuRfzxuIgALI9C2lHVnOUTaHFFQ4ueCyE8S1wF3BqfmI7avSKecs2t -CsvMo2ebKHTEm9caPARYpoKdrcd7b/+Alun4jWq9GJAd/0kakFI3ky88Al2CdgtR -5xbHV/g4+afNmyJU72OwFW1TZQNKXkqgsqeOSQBZONXH9IBk9W6VULgRfhVwOEqw -f9DEMnDAGf/JOC0ULGb0QkTmVXYbgBVX/8Cnp6o5qtjTcNAuuuuUavpfNIbnYrX9 -ivAwhZTJryQCL2/W3Wf+47BVTwSYT6RBVuKT0Gro1vP7ZeDOdcQxWQzugsgMYDNK -GbqEZycPvEJdvSRUDewdcAZfpLz6IHxV ------END CERTIFICATE----- - -# Issuer: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus ECC Root CA O=iTrusChina Co.,Ltd. 
-# Label: "vTrus ECC Root CA" -# Serial: 630369271402956006249506845124680065938238527194 -# MD5 Fingerprint: de:4b:c1:f5:52:8c:9b:43:e1:3e:8f:55:54:17:8d:85 -# SHA1 Fingerprint: f6:9c:db:b0:fc:f6:02:13:b6:52:32:a6:a3:91:3f:16:70:da:c3:e1 -# SHA256 Fingerprint: 30:fb:ba:2c:32:23:8e:2a:98:54:7a:f9:79:31:e5:50:42:8b:9b:3f:1c:8e:eb:66:33:dc:fa:86:c5:b2:7d:d3 ------BEGIN CERTIFICATE----- -MIICDzCCAZWgAwIBAgIUbmq8WapTvpg5Z6LSa6Q75m0c1towCgYIKoZIzj0EAwMw -RzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4xGjAY -BgNVBAMTEXZUcnVzIEVDQyBSb290IENBMB4XDTE4MDczMTA3MjY0NFoXDTQzMDcz -MTA3MjY0NFowRzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28u -LEx0ZC4xGjAYBgNVBAMTEXZUcnVzIEVDQyBSb290IENBMHYwEAYHKoZIzj0CAQYF -K4EEACIDYgAEZVBKrox5lkqqHAjDo6LN/llWQXf9JpRCux3NCNtzslt188+cToL0 -v/hhJoVs1oVbcnDS/dtitN9Ti72xRFhiQgnH+n9bEOf+QP3A2MMrMudwpremIFUd -e4BdS49nTPEQo0IwQDAdBgNVHQ4EFgQUmDnNvtiyjPeyq+GtJK97fKHbH88wDwYD -VR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwCgYIKoZIzj0EAwMDaAAwZQIw -V53dVvHH4+m4SVBrm2nDb+zDfSXkV5UTQJtS0zvzQBm8JsctBp61ezaf9SXUY2sA -AjEA6dPGnlaaKsyh2j/IZivTWJwghfqrkYpwcBE4YGQLYgmRWAD5Tfs0aNoJrSEG -GJTO ------END CERTIFICATE----- - -# Issuer: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Subject: CN=vTrus Root CA O=iTrusChina Co.,Ltd. -# Label: "vTrus Root CA" -# Serial: 387574501246983434957692974888460947164905180485 -# MD5 Fingerprint: b8:c9:37:df:fa:6b:31:84:64:c5:ea:11:6a:1b:75:fc -# SHA1 Fingerprint: 84:1a:69:fb:f5:cd:1a:25:34:13:3d:e3:f8:fc:b8:99:d0:c9:14:b7 -# SHA256 Fingerprint: 8a:71:de:65:59:33:6f:42:6c:26:e5:38:80:d0:0d:88:a1:8d:a4:c6:a9:1f:0d:cb:61:94:e2:06:c5:c9:63:87 ------BEGIN CERTIFICATE----- -MIIFVjCCAz6gAwIBAgIUQ+NxE9izWRRdt86M/TX9b7wFjUUwDQYJKoZIhvcNAQEL -BQAwQzELMAkGA1UEBhMCQ04xHDAaBgNVBAoTE2lUcnVzQ2hpbmEgQ28uLEx0ZC4x -FjAUBgNVBAMTDXZUcnVzIFJvb3QgQ0EwHhcNMTgwNzMxMDcyNDA1WhcNNDMwNzMx -MDcyNDA1WjBDMQswCQYDVQQGEwJDTjEcMBoGA1UEChMTaVRydXNDaGluYSBDby4s -THRkLjEWMBQGA1UEAxMNdlRydXMgUm9vdCBDQTCCAiIwDQYJKoZIhvcNAQEBBQAD -ggIPADCCAgoCggIBAL1VfGHTuB0EYgWgrmy3cLRB6ksDXhA/kFocizuwZotsSKYc -IrrVQJLuM7IjWcmOvFjai57QGfIvWcaMY1q6n6MLsLOaXLoRuBLpDLvPbmyAhykU -AyyNJJrIZIO1aqwTLDPxn9wsYTwaP3BVm60AUn/PBLn+NvqcwBauYv6WTEN+VRS+ -GrPSbcKvdmaVayqwlHeFXgQPYh1jdfdr58tbmnDsPmcF8P4HCIDPKNsFxhQnL4Z9 -8Cfe/+Z+M0jnCx5Y0ScrUw5XSmXX+6KAYPxMvDVTAWqXcoKv8R1w6Jz1717CbMdH -flqUhSZNO7rrTOiwCcJlwp2dCZtOtZcFrPUGoPc2BX70kLJrxLT5ZOrpGgrIDajt -J8nU57O5q4IikCc9Kuh8kO+8T/3iCiSn3mUkpF3qwHYw03dQ+A0Em5Q2AXPKBlim -0zvc+gRGE1WKyURHuFE5Gi7oNOJ5y1lKCn+8pu8fA2dqWSslYpPZUxlmPCdiKYZN -pGvu/9ROutW04o5IWgAZCfEF2c6Rsffr6TlP9m8EQ5pV9T4FFL2/s1m02I4zhKOQ -UqqzApVg+QxMaPnu1RcN+HFXtSXkKe5lXa/R7jwXC1pDxaWG6iSe4gUH3DRCEpHW -OXSuTEGC2/KmSNGzm/MzqvOmwMVO9fSddmPmAsYiS8GVP1BkLFTltvA8Kc9XAgMB -AAGjQjBAMB0GA1UdDgQWBBRUYnBj8XWEQ1iO0RYgscasGrz2iTAPBgNVHRMBAf8E -BTADAQH/MA4GA1UdDwEB/wQEAwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAKbqSSaet -8PFww+SX8J+pJdVrnjT+5hpk9jprUrIQeBqfTNqK2uwcN1LgQkv7bHbKJAs5EhWd -nxEt/Hlk3ODg9d3gV8mlsnZwUKT+twpw1aA08XXXTUm6EdGz2OyC/+sOxL9kLX1j -bhd47F18iMjrjld22VkE+rxSH0Ws8HqA7Oxvdq6R2xCOBNyS36D25q5J08FsEhvM -Kar5CKXiNxTKsbhm7xqC5PD48acWabfbqWE8n/Uxy+QARsIvdLGx14HuqCaVvIiv -TDUHKgLKeBRtRytAVunLKmChZwOgzoy8sHJnxDHO2zTlJQNgJXtxmOTAGytfdELS -S8VZCAeHvsXDf+eW2eHcKJfWjwXj9ZtOyh1QRwVTsMo554WgicEFOwE30z9J4nfr -I8iIZjs9OXYhRvHsXyO466JmdXTBQPfYaJqT4i2pLr0cox7IdMakLXogqzu4sEb9 -b91fUlV1YvCXoHzXOP0l382gmxDPi7g4Xl7FtKYCNqEeXxzP4padKar9mK5S4fNB -UvupLnKWnyfjqnN9+BojZns7q2WwMgFLFT49ok8MKzWixtlnEjUwzXYuFrOZnk1P -Ti07NEPhmg4NpGaXutIcSkwsKouLgU9xGqndXHt7CMUADTdA43x7VF8vhV929ven -sBxXVsFy6K2ir40zSbofitzmdHxghm+Hl3s= ------END CERTIFICATE----- - -# 
Issuer: CN=ISRG Root X2 O=Internet Security Research Group -# Subject: CN=ISRG Root X2 O=Internet Security Research Group -# Label: "ISRG Root X2" -# Serial: 87493402998870891108772069816698636114 -# MD5 Fingerprint: d3:9e:c4:1e:23:3c:a6:df:cf:a3:7e:6d:e0:14:e6:e5 -# SHA1 Fingerprint: bd:b1:b9:3c:d5:97:8d:45:c6:26:14:55:f8:db:95:c7:5a:d1:53:af -# SHA256 Fingerprint: 69:72:9b:8e:15:a8:6e:fc:17:7a:57:af:b7:17:1d:fc:64:ad:d2:8c:2f:ca:8c:f1:50:7e:34:45:3c:cb:14:70 ------BEGIN CERTIFICATE----- -MIICGzCCAaGgAwIBAgIQQdKd0XLq7qeAwSxs6S+HUjAKBggqhkjOPQQDAzBPMQsw -CQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJuZXQgU2VjdXJpdHkgUmVzZWFyY2gg -R3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBYMjAeFw0yMDA5MDQwMDAwMDBaFw00 -MDA5MTcxNjAwMDBaME8xCzAJBgNVBAYTAlVTMSkwJwYDVQQKEyBJbnRlcm5ldCBT -ZWN1cml0eSBSZXNlYXJjaCBHcm91cDEVMBMGA1UEAxMMSVNSRyBSb290IFgyMHYw -EAYHKoZIzj0CAQYFK4EEACIDYgAEzZvVn4CDCuwJSvMWSj5cz3es3mcFDR0HttwW -+1qLFNvicWDEukWVEYmO6gbf9yoWHKS5xcUy4APgHoIYOIvXRdgKam7mAHf7AlF9 -ItgKbppbd9/w+kHsOdx1ymgHDB/qo0IwQDAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0T -AQH/BAUwAwEB/zAdBgNVHQ4EFgQUfEKWrt5LSDv6kviejM9ti6lyN5UwCgYIKoZI -zj0EAwMDaAAwZQIwe3lORlCEwkSHRhtFcP9Ymd70/aTSVaYgLXTWNLxBo1BfASdW -tL4ndQavEi51mI38AjEAi/V3bNTIZargCyzuFJ0nN6T5U6VR5CmD1/iQMVtCnwr1 -/q4AaOeMSQ+2b1tbFfLn ------END CERTIFICATE----- - -# Issuer: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Subject: CN=HiPKI Root CA - G1 O=Chunghwa Telecom Co., Ltd. -# Label: "HiPKI Root CA - G1" -# Serial: 60966262342023497858655262305426234976 -# MD5 Fingerprint: 69:45:df:16:65:4b:e8:68:9a:8f:76:5f:ff:80:9e:d3 -# SHA1 Fingerprint: 6a:92:e4:a8:ee:1b:ec:96:45:37:e3:29:57:49:cd:96:e3:e5:d2:60 -# SHA256 Fingerprint: f0:15:ce:3c:c2:39:bf:ef:06:4b:e9:f1:d2:c4:17:e1:a0:26:4a:0a:94:be:1f:0c:8d:12:18:64:eb:69:49:cc ------BEGIN CERTIFICATE----- -MIIFajCCA1KgAwIBAgIQLd2szmKXlKFD6LDNdmpeYDANBgkqhkiG9w0BAQsFADBP -MQswCQYDVQQGEwJUVzEjMCEGA1UECgwaQ2h1bmdod2EgVGVsZWNvbSBDby4sIEx0 -ZC4xGzAZBgNVBAMMEkhpUEtJIFJvb3QgQ0EgLSBHMTAeFw0xOTAyMjIwOTQ2MDRa -Fw0zNzEyMzExNTU5NTlaME8xCzAJBgNVBAYTAlRXMSMwIQYDVQQKDBpDaHVuZ2h3 -YSBUZWxlY29tIENvLiwgTHRkLjEbMBkGA1UEAwwSSGlQS0kgUm9vdCBDQSAtIEcx -MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEA9B5/UnMyDHPkvRN0o9Qw -qNCuS9i233VHZvR85zkEHmpwINJaR3JnVfSl6J3VHiGh8Ge6zCFovkRTv4354twv -Vcg3Px+kwJyz5HdcoEb+d/oaoDjq7Zpy3iu9lFc6uux55199QmQ5eiY29yTw1S+6 -lZgRZq2XNdZ1AYDgr/SEYYwNHl98h5ZeQa/rh+r4XfEuiAU+TCK72h8q3VJGZDnz -Qs7ZngyzsHeXZJzA9KMuH5UHsBffMNsAGJZMoYFL3QRtU6M9/Aes1MU3guvklQgZ -KILSQjqj2FPseYlgSGDIcpJQ3AOPgz+yQlda22rpEZfdhSi8MEyr48KxRURHH+CK -FgeW0iEPU8DtqX7UTuybCeyvQqww1r/REEXgphaypcXTT3OUM3ECoWqj1jOXTyFj -HluP2cFeRXF3D4FdXyGarYPM+l7WjSNfGz1BryB1ZlpK9p/7qxj3ccC2HTHsOyDr -y+K49a6SsvfhhEvyovKTmiKe0xRvNlS9H15ZFblzqMF8b3ti6RZsR1pl8w4Rm0bZ -/W3c1pzAtH2lsN0/Vm+h+fbkEkj9Bn8SV7apI09bA8PgcSojt/ewsTu8mL3WmKgM -a/aOEmem8rJY5AIJEzypuxC00jBF8ez3ABHfZfjcK0NVvxaXxA/VLGGEqnKG/uY6 -fsI/fe78LxQ+5oXdUG+3Se0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNV -HQ4EFgQU8ncX+l6o/vY9cdVouslGDDjYr7AwDgYDVR0PAQH/BAQDAgGGMA0GCSqG -SIb3DQEBCwUAA4ICAQBQUfB13HAE4/+qddRxosuej6ip0691x1TPOhwEmSKsxBHi -7zNKpiMdDg1H2DfHb680f0+BazVP6XKlMeJ45/dOlBhbQH3PayFUhuaVevvGyuqc -SE5XCV0vrPSltJczWNWseanMX/mF+lLFjfiRFOs6DRfQUsJ748JzjkZ4Bjgs6Fza -ZsT0pPBWGTMpWmWSBUdGSquEwx4noR8RkpkndZMPvDY7l1ePJlsMu5wP1G4wB9Tc -XzZoZjmDlicmisjEOf6aIW/Vcobpf2Lll07QJNBAsNB1CI69aO4I1258EHBGG3zg -iLKecoaZAeO/n0kZtCW+VmWuF2PlHt/o/0elv+EmBYTksMCv5wiZqAxeJoBF1Pho -L5aPruJKHJwWDBNvOIf2u8g0X5IDUXlwpt/L9ZlNec1OvFefQ05rLisY+GpzjLrF -Ne85akEez3GoorKGB1s6yeHvP2UEgEcyRHCVTjFnanRbEEV16rCf0OY1/k6fi8wr 
-kkVbbiVghUbN0aqwdmaTd5a+g744tiROJgvM7XpWGuDpWsZkrUx6AEhEL7lAuxM+ -vhV4nYWBSipX3tUZQ9rbyltHhoMLP7YNdnhzeSJesYAfz77RP1YQmCuVh6EfnWQU -YDksswBVLuT1sw5XxJFBAJw/6KXf6vb/yPCtbVKoF6ubYfwSUTXkJf2vqmqGOQ== ------END CERTIFICATE----- - -# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign ECC Root CA - R4 -# Label: "GlobalSign ECC Root CA - R4" -# Serial: 159662223612894884239637590694 -# MD5 Fingerprint: 26:29:f8:6d:e1:88:bf:a2:65:7f:aa:c4:cd:0f:7f:fc -# SHA1 Fingerprint: 6b:a0:b0:98:e1:71:ef:5a:ad:fe:48:15:80:77:10:f4:bd:6f:0b:28 -# SHA256 Fingerprint: b0:85:d7:0b:96:4f:19:1a:73:e4:af:0d:54:ae:7a:0e:07:aa:fd:af:9b:71:dd:08:62:13:8a:b7:32:5a:24:a2 ------BEGIN CERTIFICATE----- -MIIB3DCCAYOgAwIBAgINAgPlfvU/k/2lCSGypjAKBggqhkjOPQQDAjBQMSQwIgYD -VQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0gUjQxEzARBgNVBAoTCkdsb2Jh -bFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMTIxMTEzMDAwMDAwWhcNMzgw -MTE5MDMxNDA3WjBQMSQwIgYDVQQLExtHbG9iYWxTaWduIEVDQyBSb290IENBIC0g -UjQxEzARBgNVBAoTCkdsb2JhbFNpZ24xEzARBgNVBAMTCkdsb2JhbFNpZ24wWTAT -BgcqhkjOPQIBBggqhkjOPQMBBwNCAAS4xnnTj2wlDp8uORkcA6SumuU5BwkWymOx -uYb4ilfBV85C+nOh92VC/x7BALJucw7/xyHlGKSq2XE/qNS5zowdo0IwQDAOBgNV -HQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUVLB7rUW44kB/ -+wpu+74zyTyjhNUwCgYIKoZIzj0EAwIDRwAwRAIgIk90crlgr/HmnKAWBVBfw147 -bmF0774BxL4YSFlhgjICICadVGNA3jdgUM/I2O2dgq43mLyjj0xMqTQrbO/7lZsm ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R1 O=Google Trust Services LLC -# Subject: CN=GTS Root R1 O=Google Trust Services LLC -# Label: "GTS Root R1" -# Serial: 159662320309726417404178440727 -# MD5 Fingerprint: 05:fe:d0:bf:71:a8:a3:76:63:da:01:e0:d8:52:dc:40 -# SHA1 Fingerprint: e5:8c:1c:c4:91:3b:38:63:4b:e9:10:6e:e3:ad:8e:6b:9d:d9:81:4a -# SHA256 Fingerprint: d9:47:43:2a:bd:e7:b7:fa:90:fc:2e:6b:59:10:1b:12:80:e0:e1:c7:e4:e4:0f:a3:c6:88:7f:ff:57:a7:f4:cf ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlk28xsBNJiGuiFzANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjEwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjEwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQC2EQKLHuOhd5s73L+UPreVp0A8of2C+X0yBoJx9vaMf/vo -27xqLpeXo4xL+Sv2sfnOhB2x+cWX3u+58qPpvBKJXqeqUqv4IyfLpLGcY9vXmX7w -Cl7raKb0xlpHDU0QM+NOsROjyBhsS+z8CZDfnWQpJSMHobTSPS5g4M/SCYe7zUjw -TcLCeoiKu7rPWRnWr4+wB7CeMfGCwcDfLqZtbBkOtdh+JhpFAz2weaSUKK0Pfybl -qAj+lug8aJRT7oM6iCsVlgmy4HqMLnXWnOunVmSPlk9orj2XwoSPwLxAwAtcvfaH -szVsrBhQf4TgTM2S0yDpM7xSma8ytSmzJSq0SPly4cpk9+aCEI3oncKKiPo4Zor8 -Y/kB+Xj9e1x3+naH+uzfsQ55lVe0vSbv1gHR6xYKu44LtcXFilWr06zqkUspzBmk -MiVOKvFlRNACzqrOSbTqn3yDsEB750Orp2yjj32JgfpMpf/VjsPOS+C12LOORc92 -wO1AK/1TD7Cn1TsNsYqiA94xrcx36m97PtbfkSIS5r762DL8EGMUUXLeXdYWk70p -aDPvOmbsB4om3xPXV2V4J95eSRQAogB/mqghtqmxlbCluQ0WEdrHbEg8QOB+DVrN -VjzRlwW5y0vtOUucxD/SVRNuJLDWcfr0wbrM7Rv1/oFB2ACYPTrIrnqYNxgFlQID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQU5K8rJnEaK0gnhS9SZizv8IkTcT4wDQYJKoZIhvcNAQEMBQADggIBAJ+qQibb -C5u+/x6Wki4+omVKapi6Ist9wTrYggoGxval3sBOh2Z5ofmmWJyq+bXmYOfg6LEe -QkEzCzc9zolwFcq1JKjPa7XSQCGYzyI0zzvFIoTgxQ6KfF2I5DUkzps+GlQebtuy -h6f88/qBVRRiClmpIgUxPoLW7ttXNLwzldMXG+gnoot7TiYaelpkttGsN/H9oPM4 -7HLwEXWdyzRSjeZ2axfG34arJ45JK3VmgRAhpuo+9K4l/3wV3s6MJT/KYnAK9y8J -ZgfIPxz88NtFMN9iiMG1D53Dn0reWVlHxYciNuaCp+0KueIHoI17eko8cdLiA6Ef -MgfdG+RCzgwARWGAtQsgWSl4vflVy2PFPEz0tv/bal8xa5meLMFrUKTX5hgUvYU/ 
-Z6tGn6D/Qqc6f1zLXbBwHSs09dR2CQzreExZBfMzQsNhFRAbd03OIozUhfJFfbdT -6u9AWpQKXCBfTkBdYiJ23//OYb2MI3jSNwLgjt7RETeJ9r/tSQdirpLsQBqvFAnZ -0E6yove+7u7Y/9waLd64NnHi/Hm3lCXRSHNboTXns5lndcEZOitHTtNCjv0xyBZm -2tIMPNuzjsmhDYAPexZ3FL//2wmUspO8IFgV6dtxQ/PeEMMA3KgqlbbC1j+Qa3bb -bP6MvPJwNQzcmRk13NfIRmPVNnGuV/u3gm3c ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R2 O=Google Trust Services LLC -# Subject: CN=GTS Root R2 O=Google Trust Services LLC -# Label: "GTS Root R2" -# Serial: 159662449406622349769042896298 -# MD5 Fingerprint: 1e:39:c0:53:e6:1e:29:82:0b:ca:52:55:36:5d:57:dc -# SHA1 Fingerprint: 9a:44:49:76:32:db:de:fa:d0:bc:fb:5a:7b:17:bd:9e:56:09:24:94 -# SHA256 Fingerprint: 8d:25:cd:97:22:9d:bf:70:35:6b:da:4e:b3:cc:73:40:31:e2:4c:f0:0f:af:cf:d3:2d:c7:6e:b5:84:1c:7e:a8 ------BEGIN CERTIFICATE----- -MIIFVzCCAz+gAwIBAgINAgPlrsWNBCUaqxElqjANBgkqhkiG9w0BAQwFADBHMQsw -CQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEU -MBIGA1UEAxMLR1RTIFJvb3QgUjIwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAw -MDAwWjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZp -Y2VzIExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjIwggIiMA0GCSqGSIb3DQEBAQUA -A4ICDwAwggIKAoICAQDO3v2m++zsFDQ8BwZabFn3GTXd98GdVarTzTukk3LvCvpt -nfbwhYBboUhSnznFt+4orO/LdmgUud+tAWyZH8QiHZ/+cnfgLFuv5AS/T3KgGjSY -6Dlo7JUle3ah5mm5hRm9iYz+re026nO8/4Piy33B0s5Ks40FnotJk9/BW9BuXvAu -MC6C/Pq8tBcKSOWIm8Wba96wyrQD8Nr0kLhlZPdcTK3ofmZemde4wj7I0BOdre7k -RXuJVfeKH2JShBKzwkCX44ofR5GmdFrS+LFjKBC4swm4VndAoiaYecb+3yXuPuWg -f9RhD1FLPD+M2uFwdNjCaKH5wQzpoeJ/u1U8dgbuak7MkogwTZq9TwtImoS1mKPV -+3PBV2HdKFZ1E66HjucMUQkQdYhMvI35ezzUIkgfKtzra7tEscszcTJGr61K8Yzo -dDqs5xoic4DSMPclQsciOzsSrZYuxsN2B6ogtzVJV+mSSeh2FnIxZyuWfoqjx5RW -Ir9qS34BIbIjMt/kmkRtWVtd9QCgHJvGeJeNkP+byKq0rxFROV7Z+2et1VsRnTKa -G73VululycslaVNVJ1zgyjbLiGH7HrfQy+4W+9OmTN6SpdTi3/UGVN4unUu0kzCq -gc7dGtxRcw1PcOnlthYhGXmy5okLdWTK1au8CcEYof/UVKGFPP0UJAOyh9OktwID -AQABo0IwQDAOBgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4E -FgQUu//KjiOfT5nK2+JopqUVJxce2Q4wDQYJKoZIhvcNAQEMBQADggIBAB/Kzt3H -vqGf2SdMC9wXmBFqiN495nFWcrKeGk6c1SuYJF2ba3uwM4IJvd8lRuqYnrYb/oM8 -0mJhwQTtzuDFycgTE1XnqGOtjHsB/ncw4c5omwX4Eu55MaBBRTUoCnGkJE+M3DyC -B19m3H0Q/gxhswWV7uGugQ+o+MePTagjAiZrHYNSVc61LwDKgEDg4XSsYPWHgJ2u -NmSRXbBoGOqKYcl3qJfEycel/FVL8/B/uWU9J2jQzGv6U53hkRrJXRqWbTKH7QMg -yALOWr7Z6v2yTcQvG99fevX4i8buMTolUVVnjWQye+mew4K6Ki3pHrTgSAai/Gev -HyICc/sgCq+dVEuhzf9gR7A/Xe8bVr2XIZYtCtFenTgCR2y59PYjJbigapordwj6 -xLEokCZYCDzifqrXPW+6MYgKBesntaFJ7qBFVHvmJ2WZICGoo7z7GJa7Um8M7YNR -TOlZ4iBgxcJlkoKM8xAfDoqXvneCbT+PHV28SSe9zE8P4c52hgQjxcCMElv924Sg -JPFI/2R80L5cFtHvma3AH/vLrrw4IgYmZNralw4/KBVEqE8AyvCazM90arQ+POuV -7LXTWtiBmelDGDfrs7vRWGJB82bSj6p4lVQgw1oudCvV0b4YacCs1aTPObpRhANl -6WLAYv7YTVWW4tAR+kg0Eeye7QUd5MjWHYbL ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R3 O=Google Trust Services LLC -# Subject: CN=GTS Root R3 O=Google Trust Services LLC -# Label: "GTS Root R3" -# Serial: 159662495401136852707857743206 -# MD5 Fingerprint: 3e:e7:9d:58:02:94:46:51:94:e5:e0:22:4a:8b:e7:73 -# SHA1 Fingerprint: ed:e5:71:80:2b:c8:92:b9:5b:83:3c:d2:32:68:3f:09:cd:a0:1e:46 -# SHA256 Fingerprint: 34:d8:a7:3e:e2:08:d9:bc:db:0d:95:65:20:93:4b:4e:40:e6:94:82:59:6e:8b:6f:73:c8:42:6b:01:0a:6f:48 ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPluILrIPglJ209ZjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjMwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjMwdjAQBgcqhkjOPQIBBgUrgQQAIgNi 
-AAQfTzOHMymKoYTey8chWEGJ6ladK0uFxh1MJ7x/JlFyb+Kf1qPKzEUURout736G -jOyxfi//qXGdGIRFBEFVbivqJn+7kAHjSxm65FSWRQmx1WyRRK2EE46ajA2ADDL2 -4CejQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBTB8Sa6oC2uhYHP0/EqEr24Cmf9vDAKBggqhkjOPQQDAwNpADBmAjEA9uEglRR7 -VKOQFhG/hMjqb2sXnh5GmCCbn9MN2azTL818+FsuVbu/3ZL3pAzcMeGiAjEA/Jdm -ZuVDFhOD3cffL74UOO0BzrEXGhF16b0DjyZ+hOXJYKaV11RZt+cRLInUue4X ------END CERTIFICATE----- - -# Issuer: CN=GTS Root R4 O=Google Trust Services LLC -# Subject: CN=GTS Root R4 O=Google Trust Services LLC -# Label: "GTS Root R4" -# Serial: 159662532700760215368942768210 -# MD5 Fingerprint: 43:96:83:77:19:4d:76:b3:9d:65:52:e4:1d:22:a5:e8 -# SHA1 Fingerprint: 77:d3:03:67:b5:e0:0c:15:f6:0c:38:61:df:7c:e1:3b:92:46:4d:47 -# SHA256 Fingerprint: 34:9d:fa:40:58:c5:e2:63:12:3b:39:8a:e7:95:57:3c:4e:13:13:c8:3f:e6:8f:93:55:6c:d5:e8:03:1b:3c:7d ------BEGIN CERTIFICATE----- -MIICCTCCAY6gAwIBAgINAgPlwGjvYxqccpBQUjAKBggqhkjOPQQDAzBHMQswCQYD -VQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2VzIExMQzEUMBIG -A1UEAxMLR1RTIFJvb3QgUjQwHhcNMTYwNjIyMDAwMDAwWhcNMzYwNjIyMDAwMDAw -WjBHMQswCQYDVQQGEwJVUzEiMCAGA1UEChMZR29vZ2xlIFRydXN0IFNlcnZpY2Vz -IExMQzEUMBIGA1UEAxMLR1RTIFJvb3QgUjQwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AATzdHOnaItgrkO4NcWBMHtLSZ37wWHO5t5GvWvVYRg1rkDdc/eJkTBa6zzuhXyi -QHY7qca4R9gq55KRanPpsXI5nymfopjTX15YhmUPoYRlBtHci8nHc8iMai/lxKvR -HYqjQjBAMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW -BBSATNbrdP9JNqPV2Py1PsVq8JQdjDAKBggqhkjOPQQDAwNpADBmAjEA6ED/g94D -9J+uHXqnLrmvT/aDHQ4thQEd0dlq7A/Cr8deVl5c1RxYIigL9zC2L7F8AjEA8GE8 -p/SgguMh1YQdc4acLa/KNJvxn7kjNuK8YAOdgLOaVsjh4rsUecrNIdSUtUlD ------END CERTIFICATE----- - -# Issuer: CN=Telia Root CA v2 O=Telia Finland Oyj -# Subject: CN=Telia Root CA v2 O=Telia Finland Oyj -# Label: "Telia Root CA v2" -# Serial: 7288924052977061235122729490515358 -# MD5 Fingerprint: 0e:8f:ac:aa:82:df:85:b1:f4:dc:10:1c:fc:99:d9:48 -# SHA1 Fingerprint: b9:99:cd:d1:73:50:8a:c4:47:05:08:9c:8c:88:fb:be:a0:2b:40:cd -# SHA256 Fingerprint: 24:2b:69:74:2f:cb:1e:5b:2a:bf:98:89:8b:94:57:21:87:54:4e:5b:4d:99:11:78:65:73:62:1f:6a:74:b8:2c ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIPAWdfJ9b+euPkrL4JWwWeMA0GCSqGSIb3DQEBCwUAMEQx -CzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZMBcGA1UE -AwwQVGVsaWEgUm9vdCBDQSB2MjAeFw0xODExMjkxMTU1NTRaFw00MzExMjkxMTU1 -NTRaMEQxCzAJBgNVBAYTAkZJMRowGAYDVQQKDBFUZWxpYSBGaW5sYW5kIE95ajEZ -MBcGA1UEAwwQVGVsaWEgUm9vdCBDQSB2MjCCAiIwDQYJKoZIhvcNAQEBBQADggIP -ADCCAgoCggIBALLQPwe84nvQa5n44ndp586dpAO8gm2h/oFlH0wnrI4AuhZ76zBq -AMCzdGh+sq/H1WKzej9Qyow2RCRj0jbpDIX2Q3bVTKFgcmfiKDOlyzG4OiIjNLh9 -vVYiQJ3q9HsDrWj8soFPmNB06o3lfc1jw6P23pLCWBnglrvFxKk9pXSW/q/5iaq9 -lRdU2HhE8Qx3FZLgmEKnpNaqIJLNwaCzlrI6hEKNfdWV5Nbb6WLEWLN5xYzTNTOD -n3WhUidhOPFZPY5Q4L15POdslv5e2QJltI5c0BE0312/UqeBAMN/mUWZFdUXyApT -7GPzmX3MaRKGwhfwAZ6/hLzRUssbkmbOpFPlob/E2wnW5olWK8jjfN7j/4nlNW4o -6GwLI1GpJQXrSPjdscr6bAhR77cYbETKJuFzxokGgeWKrLDiKca5JLNrRBH0pUPC -TEPlcDaMtjNXepUugqD0XBCzYYP2AgWGLnwtbNwDRm41k9V6lS/eINhbfpSQBGq6 -WT0EBXWdN6IOLj3rwaRSg/7Qa9RmjtzG6RJOHSpXqhC8fF6CfaamyfItufUXJ63R -DolUK5X6wK0dmBR4M0KGCqlztft0DbcbMBnEWg4cJ7faGND/isgFuvGqHKI3t+ZI -pEYslOqodmJHixBTB0hXbOKSTbauBcvcwUpej6w9GU7C7WB1K9vBykLVAgMBAAGj -YzBhMB8GA1UdIwQYMBaAFHKs5DN5qkWH9v2sHZ7Wxy+G2CQ5MB0GA1UdDgQWBBRy -rOQzeapFh/b9rB2e1scvhtgkOTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw -AwEB/zANBgkqhkiG9w0BAQsFAAOCAgEAoDtZpwmUPjaE0n4vOaWWl/oRrfxn83EJ -8rKJhGdEr7nv7ZbsnGTbMjBvZ5qsfl+yqwE2foH65IRe0qw24GtixX1LDoJt0nZi -0f6X+J8wfBj5tFJ3gh1229MdqfDBmgC9bXXYfef6xzijnHDoRnkDry5023X4blMM 
-A8iZGok1GTzTyVR8qPAs5m4HeW9q4ebqkYJpCh3DflminmtGFZhb069GHWLIzoBS -SRE/yQQSwxN8PzuKlts8oB4KtItUsiRnDe+Cy748fdHif64W1lZYudogsYMVoe+K -TTJvQS8TUoKU1xrBeKJR3Stwbbca+few4GeXVtt8YVMJAygCQMez2P2ccGrGKMOF -6eLtGpOg3kuYooQ+BXcBlj37tCAPnHICehIv1aO6UXivKitEZU61/Qrowc15h2Er -3oBXRb9n8ZuRXqWk7FlIEA04x7D6w0RtBPV4UBySllva9bguulvP5fBqnUsvWHMt -Ty3EHD70sz+rFQ47GUGKpMFXEmZxTPpT41frYpUJnlTd0cI8Vzy9OK2YZLe4A5pT -VmBds9hCG1xLEooc6+t9xnppxyd/pPiL8uSUZodL6ZQHCRJ5irLrdATczvREWeAW -ysUsWNc8e89ihmpQfTU2Zqf7N+cox9jQraVplI/owd8k+BsHMYeB2F326CjYSlKA -rBPuUBQemMc= ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST BR Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST BR Root CA 1 2020" -# Serial: 165870826978392376648679885835942448534 -# MD5 Fingerprint: b5:aa:4b:d5:ed:f7:e3:55:2e:8f:72:0a:f3:75:b8:ed -# SHA1 Fingerprint: 1f:5b:98:f0:e3:b5:f7:74:3c:ed:e6:b0:36:7d:32:cd:f4:09:41:67 -# SHA256 Fingerprint: e5:9a:aa:81:60:09:c2:2b:ff:5b:25:ba:d3:7d:f3:06:f0:49:79:7c:1f:81:d8:5a:b0:89:e6:57:bd:8f:00:44 ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQfMmPK4TX3+oPyWWa00tNljAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEJSIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTA5NDUwMFoXDTM1MDIxMTA5 -NDQ1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBCUiBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABMbLxyjR+4T1mu9CFCDhQ2tuda38KwOE1HaTJddZO0Flax7mNCq7dPYS -zuht56vkPE4/RAiLzRZxy7+SmfSk1zxQVFKQhYN4lGdnoxwJGT11NIXe7WB9xwy0 -QVK5buXuQqOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFHOREKv/ -VbNafAkl1bK6CKBrqx9tMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2JyX3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwQlIlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAlJAtE/rhY/hhY+ithXhUkZy4kzg+GkHaQBZTQgjKL47xPoFW -wKrY7RjEsK70PvomAjEA8yjixtsrmfu3Ubgko6SUeho/5jbiA1czijDLgsfWFBHV -dWNbFJWcHwHP2NVypw87 ------END CERTIFICATE----- - -# Issuer: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Subject: CN=D-TRUST EV Root CA 1 2020 O=D-Trust GmbH -# Label: "D-TRUST EV Root CA 1 2020" -# Serial: 126288379621884218666039612629459926992 -# MD5 Fingerprint: 8c:2d:9d:70:9f:48:99:11:06:11:fb:e9:cb:30:c0:6e -# SHA1 Fingerprint: 61:db:8c:21:59:69:03:90:d8:7c:9c:12:86:54:cf:9d:3d:f4:dd:07 -# SHA256 Fingerprint: 08:17:0d:1a:a3:64:53:90:1a:2f:95:92:45:e3:47:db:0c:8d:37:ab:aa:bc:56:b8:1a:a1:00:dc:95:89:70:db ------BEGIN CERTIFICATE----- -MIIC2zCCAmCgAwIBAgIQXwJB13qHfEwDo6yWjfv/0DAKBggqhkjOPQQDAzBIMQsw -CQYDVQQGEwJERTEVMBMGA1UEChMMRC1UcnVzdCBHbWJIMSIwIAYDVQQDExlELVRS -VVNUIEVWIFJvb3QgQ0EgMSAyMDIwMB4XDTIwMDIxMTEwMDAwMFoXDTM1MDIxMTA5 -NTk1OVowSDELMAkGA1UEBhMCREUxFTATBgNVBAoTDEQtVHJ1c3QgR21iSDEiMCAG -A1UEAxMZRC1UUlVTVCBFViBSb290IENBIDEgMjAyMDB2MBAGByqGSM49AgEGBSuB -BAAiA2IABPEL3YZDIBnfl4XoIkqbz52Yv7QFJsnL46bSj8WeeHsxiamJrSc8ZRCC -/N/DnU7wMyPE0jL1HLDfMxddxfCxivnvubcUyilKwg+pf3VlSSowZ/Rk99Yad9rD -wpdhQntJraOCAQ0wggEJMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFH8QARY3 -OqQo5FD4pPfsazK2/umLMA4GA1UdDwEB/wQEAwIBBjCBxgYDVR0fBIG+MIG7MD6g -PKA6hjhodHRwOi8vY3JsLmQtdHJ1c3QubmV0L2NybC9kLXRydXN0X2V2X3Jvb3Rf -Y2FfMV8yMDIwLmNybDB5oHegdYZzbGRhcDovL2RpcmVjdG9yeS5kLXRydXN0Lm5l -dC9DTj1ELVRSVVNUJTIwRVYlMjBSb290JTIwQ0ElMjAxJTIwMjAyMCxPPUQtVHJ1 -c3QlMjBHbWJILEM9REU/Y2VydGlmaWNhdGVyZXZvY2F0aW9ubGlzdDAKBggqhkjO -PQQDAwNpADBmAjEAyjzGKnXCXnViOTYAYFqLwZOZzNnbQTs7h5kXO9XMT8oi96CA 
-y/m0sRtW9XLS/BnRAjEAkfcwkz8QRitxpNA7RJvAKQIFskF3UfN5Wp6OFKBOQtJb -gfM0agPnIjhQW+0ZT0MW ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS ECC P384 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS ECC P384 Root G5" -# Serial: 13129116028163249804115411775095713523 -# MD5 Fingerprint: d3:71:04:6a:43:1c:db:a6:59:e1:a8:a3:aa:c5:71:ed -# SHA1 Fingerprint: 17:f3:de:5e:9f:0f:19:e9:8e:f6:1f:32:26:6e:20:c4:07:ae:30:ee -# SHA256 Fingerprint: 01:8e:13:f0:77:25:32:cf:80:9b:d1:b1:72:81:86:72:83:fc:48:c6:e1:3b:e9:c6:98:12:85:4a:49:0c:1b:05 ------BEGIN CERTIFICATE----- -MIICGTCCAZ+gAwIBAgIQCeCTZaz32ci5PhwLBCou8zAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJjAkBgNVBAMTHURp -Z2lDZXJ0IFRMUyBFQ0MgUDM4NCBSb290IEc1MB4XDTIxMDExNTAwMDAwMFoXDTQ2 -MDExNDIzNTk1OVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkRpZ2lDZXJ0LCBJ -bmMuMSYwJAYDVQQDEx1EaWdpQ2VydCBUTFMgRUNDIFAzODQgUm9vdCBHNTB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABMFEoc8Rl1Ca3iOCNQfN0MsYndLxf3c1TzvdlHJS -7cI7+Oz6e2tYIOyZrsn8aLN1udsJ7MgT9U7GCh1mMEy7H0cKPGEQQil8pQgO4CLp -0zVozptjn4S1mU1YoI71VOeVyaNCMEAwHQYDVR0OBBYEFMFRRVBZqz7nLFr6ICIS -B4CIfBFqMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MAoGCCqGSM49 -BAMDA2gAMGUCMQCJao1H5+z8blUD2WdsJk6Dxv3J+ysTvLd6jLRl0mlpYxNjOyZQ -LgGheQaRnUi/wr4CMEfDFXuxoJGZSZOoPHzoRgaLLPIxAJSdYsiJvRmEFOml+wG4 -DXZDjC5Ty3zfDBeWUA== ------END CERTIFICATE----- - -# Issuer: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Subject: CN=DigiCert TLS RSA4096 Root G5 O=DigiCert, Inc. -# Label: "DigiCert TLS RSA4096 Root G5" -# Serial: 11930366277458970227240571539258396554 -# MD5 Fingerprint: ac:fe:f7:34:96:a9:f2:b3:b4:12:4b:e4:27:41:6f:e1 -# SHA1 Fingerprint: a7:88:49:dc:5d:7c:75:8c:8c:de:39:98:56:b3:aa:d0:b2:a5:71:35 -# SHA256 Fingerprint: 37:1a:00:dc:05:33:b3:72:1a:7e:eb:40:e8:41:9e:70:79:9d:2b:0a:0f:2c:1d:80:69:31:65:f7:ce:c4:ad:75 ------BEGIN CERTIFICATE----- -MIIFZjCCA06gAwIBAgIQCPm0eKj6ftpqMzeJ3nzPijANBgkqhkiG9w0BAQwFADBN -MQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQsIEluYy4xJTAjBgNVBAMT -HERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwHhcNMjEwMTE1MDAwMDAwWhcN -NDYwMTE0MjM1OTU5WjBNMQswCQYDVQQGEwJVUzEXMBUGA1UEChMORGlnaUNlcnQs -IEluYy4xJTAjBgNVBAMTHERpZ2lDZXJ0IFRMUyBSU0E0MDk2IFJvb3QgRzUwggIi -MA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCz0PTJeRGd/fxmgefM1eS87IE+ -ajWOLrfn3q/5B03PMJ3qCQuZvWxX2hhKuHisOjmopkisLnLlvevxGs3npAOpPxG0 -2C+JFvuUAT27L/gTBaF4HI4o4EXgg/RZG5Wzrn4DReW+wkL+7vI8toUTmDKdFqgp -wgscONyfMXdcvyej/Cestyu9dJsXLfKB2l2w4SMXPohKEiPQ6s+d3gMXsUJKoBZM -pG2T6T867jp8nVid9E6P/DsjyG244gXazOvswzH016cpVIDPRFtMbzCe88zdH5RD -nU1/cHAN1DrRN/BsnZvAFJNY781BOHW8EwOVfH/jXOnVDdXifBBiqmvwPXbzP6Po -sMH976pXTayGpxi0KcEsDr9kvimM2AItzVwv8n/vFfQMFawKsPHTDU9qTXeXAaDx -Zre3zu/O7Oyldcqs4+Fj97ihBMi8ez9dLRYiVu1ISf6nL3kwJZu6ay0/nTvEF+cd -Lvvyz6b84xQslpghjLSR6Rlgg/IwKwZzUNWYOwbpx4oMYIwo+FKbbuH2TbsGJJvX -KyY//SovcfXWJL5/MZ4PbeiPT02jP/816t9JXkGPhvnxd3lLG7SjXi/7RgLQZhNe -XoVPzthwiHvOAbWWl9fNff2C+MIkwcoBOU+NosEUQB+cZtUMCUbW8tDRSHZWOkPL -tgoRObqME2wGtZ7P6wIDAQABo0IwQDAdBgNVHQ4EFgQUUTMc7TZArxfTJc1paPKv -TiM+s0EwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcN -AQEMBQADggIBAGCmr1tfV9qJ20tQqcQjNSH/0GEwhJG3PxDPJY7Jv0Y02cEhJhxw -GXIeo8mH/qlDZJY6yFMECrZBu8RHANmfGBg7sg7zNOok992vIGCukihfNudd5N7H -PNtQOa27PShNlnx2xlv0wdsUpasZYgcYQF+Xkdycx6u1UQ3maVNVzDl92sURVXLF -O4uJ+DQtpBflF+aZfTCIITfNMBc9uPK8qHWgQ9w+iUuQrm0D4ByjoJYJu32jtyoQ -REtGBzRj7TG5BO6jm5qu5jF49OokYTurWGT/u4cnYiWB39yhL/btp/96j1EuMPik -AdKFOV8BmZZvWltwGUb+hmA+rYAQCd05JS9Yf7vSdPD3Rh9GOUrYU9DzLjtxpdRv 
-/PNn5AeP3SYZ4Y1b+qOTEZvpyDrDVWiakuFSdjjo4bq9+0/V77PnSIMx8IIh47a+ -p6tv75/fTM8BuGJqIz3nCU2AG3swpMPdB380vqQmsvZB6Akd4yCYqjdP//fx4ilw -MUc/dNAUFvohigLVigmUdy7yWSiLfFCSCmZ4OIN1xLVaqBHG5cGdZlXPU8Sv13WF -qUITVuwhd4GTWgzqltlJyqEI8pc7bZsEGCREjnwB8twl2F6GmrE52/WRMmrRpnCK -ovfepEWFJqgejF0pW8hL2JpqA15w8oVPbEtoL8pU9ozaMv7Da4M/OMZ+ ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root R1 O=Certainly -# Subject: CN=Certainly Root R1 O=Certainly -# Label: "Certainly Root R1" -# Serial: 188833316161142517227353805653483829216 -# MD5 Fingerprint: 07:70:d4:3e:82:87:a0:fa:33:36:13:f4:fa:33:e7:12 -# SHA1 Fingerprint: a0:50:ee:0f:28:71:f4:27:b2:12:6d:6f:50:96:25:ba:cc:86:42:af -# SHA256 Fingerprint: 77:b8:2c:d8:64:4c:43:05:f7:ac:c5:cb:15:6b:45:67:50:04:03:3d:51:c6:0c:62:02:a8:e0:c3:34:67:d3:a0 ------BEGIN CERTIFICATE----- -MIIFRzCCAy+gAwIBAgIRAI4P+UuQcWhlM1T01EQ5t+AwDQYJKoZIhvcNAQELBQAw -PTELMAkGA1UEBhMCVVMxEjAQBgNVBAoTCUNlcnRhaW5seTEaMBgGA1UEAxMRQ2Vy -dGFpbmx5IFJvb3QgUjEwHhcNMjEwNDAxMDAwMDAwWhcNNDYwNDAxMDAwMDAwWjA9 -MQswCQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0 -YWlubHkgUm9vdCBSMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANA2 -1B/q3avk0bbm+yLA3RMNansiExyXPGhjZjKcA7WNpIGD2ngwEc/csiu+kr+O5MQT -vqRoTNoCaBZ0vrLdBORrKt03H2As2/X3oXyVtwxwhi7xOu9S98zTm/mLvg7fMbed -aFySpvXl8wo0tf97ouSHocavFwDvA5HtqRxOcT3Si2yJ9HiG5mpJoM610rCrm/b0 -1C7jcvk2xusVtyWMOvwlDbMicyF0yEqWYZL1LwsYpfSt4u5BvQF5+paMjRcCMLT5 -r3gajLQ2EBAHBXDQ9DGQilHFhiZ5shGIXsXwClTNSaa/ApzSRKft43jvRl5tcdF5 -cBxGX1HpyTfcX35pe0HfNEXgO4T0oYoKNp43zGJS4YkNKPl6I7ENPT2a/Z2B7yyQ -wHtETrtJ4A5KVpK8y7XdeReJkd5hiXSSqOMyhb5OhaRLWcsrxXiOcVTQAjeZjOVJ -6uBUcqQRBi8LjMFbvrWhsFNunLhgkR9Za/kt9JQKl7XsxXYDVBtlUrpMklZRNaBA -2CnbrlJ2Oy0wQJuK0EJWtLeIAaSHO1OWzaMWj/Nmqhexx2DgwUMFDO6bW2BvBlyH -Wyf5QBGenDPBt+U1VwV/J84XIIwc/PH72jEpSe31C4SnT8H2TsIonPru4K8H+zMR -eiFPCyEQtkA6qyI6BJyLm4SGcprSp6XEtHWRqSsjAgMBAAGjQjBAMA4GA1UdDwEB -/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTgqj8ljZ9EXME66C6u -d0yEPmcM9DANBgkqhkiG9w0BAQsFAAOCAgEAuVevuBLaV4OPaAszHQNTVfSVcOQr -PbA56/qJYv331hgELyE03fFo8NWWWt7CgKPBjcZq91l3rhVkz1t5BXdm6ozTaw3d -8VkswTOlMIAVRQdFGjEitpIAq5lNOo93r6kiyi9jyhXWx8bwPWz8HA2YEGGeEaIi -1wrykXprOQ4vMMM2SZ/g6Q8CRFA3lFV96p/2O7qUpUzpvD5RtOjKkjZUbVwlKNrd -rRT90+7iIgXr0PK3aBLXWopBGsaSpVo7Y0VPv+E6dyIvXL9G+VoDhRNCX8reU9di -taY1BMJH/5n9hN9czulegChB8n3nHpDYT3Y+gjwN/KUD+nsa2UUeYNrEjvn8K8l7 -lcUq/6qJ34IxD3L/DCfXCh5WAFAeDJDBlrXYFIW7pw0WwfgHJBu6haEaBQmAupVj -yTrsJZ9/nbqkRxWbRHDxakvWOF5D8xh+UG7pWijmZeZ3Gzr9Hb4DJqPb1OG7fpYn -Kx3upPvaJVQTA945xsMfTZDsjxtK0hzthZU4UHlG1sGQUDGpXJpuHfUzVounmdLy -yCwzk5Iwx06MZTMQZBf9JBeW0Y3COmor6xOLRPIh80oat3df1+2IpHLlOR+Vnb5n -wXARPbv0+Em34yaXOp/SX3z7wJl8OSngex2/DaeP0ik0biQVy96QXr8axGbqwua6 -OV+KmalBWQewLK8= ------END CERTIFICATE----- - -# Issuer: CN=Certainly Root E1 O=Certainly -# Subject: CN=Certainly Root E1 O=Certainly -# Label: "Certainly Root E1" -# Serial: 8168531406727139161245376702891150584 -# MD5 Fingerprint: 0a:9e:ca:cd:3e:52:50:c6:36:f3:4b:a3:ed:a7:53:e9 -# SHA1 Fingerprint: f9:e1:6d:dc:01:89:cf:d5:82:45:63:3e:c5:37:7d:c2:eb:93:6f:2b -# SHA256 Fingerprint: b4:58:5f:22:e4:ac:75:6a:4e:86:12:a1:36:1c:5d:9d:03:1a:93:fd:84:fe:bb:77:8f:a3:06:8b:0f:c4:2d:c2 ------BEGIN CERTIFICATE----- -MIIB9zCCAX2gAwIBAgIQBiUzsUcDMydc+Y2aub/M+DAKBggqhkjOPQQDAzA9MQsw -CQYDVQQGEwJVUzESMBAGA1UEChMJQ2VydGFpbmx5MRowGAYDVQQDExFDZXJ0YWlu -bHkgUm9vdCBFMTAeFw0yMTA0MDEwMDAwMDBaFw00NjA0MDEwMDAwMDBaMD0xCzAJ -BgNVBAYTAlVTMRIwEAYDVQQKEwlDZXJ0YWlubHkxGjAYBgNVBAMTEUNlcnRhaW5s -eSBSb290IEUxMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE3m/4fxzf7flHh4axpMCK 
-+IKXgOqPyEpeKn2IaKcBYhSRJHpcnqMXfYqGITQYUBsQ3tA3SybHGWCA6TS9YBk2 -QNYphwk8kXr2vBMj3VlOBF7PyAIcGFPBMdjaIOlEjeR2o0IwQDAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU8ygYy2R17ikq6+2uI1g4 -hevIIgcwCgYIKoZIzj0EAwMDaAAwZQIxALGOWiDDshliTd6wT99u0nCK8Z9+aozm -ut6Dacpps6kFtZaSF4fC0urQe87YQVt8rgIwRt7qy12a7DLCZRawTDBcMPPaTnOG -BtjOiQRINzf43TNRnXCve1XYAS59BWQOhriR ------END CERTIFICATE----- - -# Issuer: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication RootCA3 O=SECOM Trust Systems CO.,LTD. -# Label: "Security Communication RootCA3" -# Serial: 16247922307909811815 -# MD5 Fingerprint: 1c:9a:16:ff:9e:5c:e0:4d:8a:14:01:f4:35:5d:29:26 -# SHA1 Fingerprint: c3:03:c8:22:74:92:e5:61:a2:9c:5f:79:91:2b:1e:44:13:91:30:3a -# SHA256 Fingerprint: 24:a5:5c:2a:b0:51:44:2d:06:17:76:65:41:23:9a:4a:d0:32:d7:c5:51:75:aa:34:ff:de:2f:bc:4f:5c:52:94 ------BEGIN CERTIFICATE----- -MIIFfzCCA2egAwIBAgIJAOF8N0D9G/5nMA0GCSqGSIb3DQEBDAUAMF0xCzAJBgNV -BAYTAkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMScw -JQYDVQQDEx5TZWN1cml0eSBDb21tdW5pY2F0aW9uIFJvb3RDQTMwHhcNMTYwNjE2 -MDYxNzE2WhcNMzgwMTE4MDYxNzE2WjBdMQswCQYDVQQGEwJKUDElMCMGA1UEChMc -U0VDT00gVHJ1c3QgU3lzdGVtcyBDTy4sTFRELjEnMCUGA1UEAxMeU2VjdXJpdHkg -Q29tbXVuaWNhdGlvbiBSb290Q0EzMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIIC -CgKCAgEA48lySfcw3gl8qUCBWNO0Ot26YQ+TUG5pPDXC7ltzkBtnTCHsXzW7OT4r -CmDvu20rhvtxosis5FaU+cmvsXLUIKx00rgVrVH+hXShuRD+BYD5UpOzQD11EKzA -lrenfna84xtSGc4RHwsENPXY9Wk8d/Nk9A2qhd7gCVAEF5aEt8iKvE1y/By7z/MG -TfmfZPd+pmaGNXHIEYBMwXFAWB6+oHP2/D5Q4eAvJj1+XCO1eXDe+uDRpdYMQXF7 -9+qMHIjH7Iv10S9VlkZ8WjtYO/u62C21Jdp6Ts9EriGmnpjKIG58u4iFW/vAEGK7 -8vknR+/RiTlDxN/e4UG/VHMgly1s2vPUB6PmudhvrvyMGS7TZ2crldtYXLVqAvO4 -g160a75BflcJdURQVc1aEWEhCmHCqYj9E7wtiS/NYeCVvsq1e+F7NGcLH7YMx3we -GVPKp7FKFSBWFHA9K4IsD50VHUeAR/94mQ4xr28+j+2GaR57GIgUssL8gjMunEst -+3A7caoreyYn8xrC3PsXuKHqy6C0rtOUfnrQq8PsOC0RLoi/1D+tEjtCrI8Cbn3M -0V9hvqG8OmpI6iZVIhZdXw3/JzOfGAN0iltSIEdrRU0id4xVJ/CvHozJgyJUt5rQ -T9nO/NkuHJYosQLTA70lUhw0Zk8jq/R3gpYd0VcwCBEF/VfR2ccCAwEAAaNCMEAw -HQYDVR0OBBYEFGQUfPxYchamCik0FW8qy7z8r6irMA4GA1UdDwEB/wQEAwIBBjAP -BgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBDAUAA4ICAQDcAiMI4u8hOscNtybS -YpOnpSNyByCCYN8Y11StaSWSntkUz5m5UoHPrmyKO1o5yGwBQ8IibQLwYs1OY0PA -FNr0Y/Dq9HHuTofjcan0yVflLl8cebsjqodEV+m9NU1Bu0soo5iyG9kLFwfl9+qd -9XbXv8S2gVj/yP9kaWJ5rW4OH3/uHWnlt3Jxs/6lATWUVCvAUm2PVcTJ0rjLyjQI -UYWg9by0F1jqClx6vWPGOi//lkkZhOpn2ASxYfQAW0q3nHE3GYV5v4GwxxMOdnE+ -OoAGrgYWp421wsTL/0ClXI2lyTrtcoHKXJg80jQDdwj98ClZXSEIx2C/pHF7uNke -gr4Jr2VvKKu/S7XuPghHJ6APbw+LP6yVGPO5DtxnVW5inkYO0QR4ynKudtml+LLf -iAlhi+8kTtFZP1rUPcmTPCtk9YENFpb3ksP+MW/oKjJ0DvRMmEoYDjBU1cXrvMUV -nuiZIesnKwkK2/HmcBhWuwzkvvnoEKQTkrgc4NtnHVMDpCKn3F2SEDzq//wbEBrD -2NCcnWXL0CsnMQMeNuE9dnUM/0Umud1RvCPHX9jYhxBAEg09ODfnRDwYwFMJZI// -1ZqmfHAuc1Uh6N//g7kdPjIe1qZ9LPFm6Vwdp6POXiUyK+OVrCoHzrQoeIY8Laad -TdJ0MN1kURXbg4NR16/9M51NZg== ------END CERTIFICATE----- - -# Issuer: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. -# Subject: CN=Security Communication ECC RootCA1 O=SECOM Trust Systems CO.,LTD. 
-# Label: "Security Communication ECC RootCA1" -# Serial: 15446673492073852651 -# MD5 Fingerprint: 7e:43:b0:92:68:ec:05:43:4c:98:ab:5d:35:2e:7e:86 -# SHA1 Fingerprint: b8:0e:26:a9:bf:d2:b2:3b:c0:ef:46:c9:ba:c7:bb:f6:1d:0d:41:41 -# SHA256 Fingerprint: e7:4f:bd:a5:5b:d5:64:c4:73:a3:6b:44:1a:a7:99:c8:a6:8e:07:74:40:e8:28:8b:9f:a1:e5:0e:4b:ba:ca:11 ------BEGIN CERTIFICATE----- -MIICODCCAb6gAwIBAgIJANZdm7N4gS7rMAoGCCqGSM49BAMDMGExCzAJBgNVBAYT -AkpQMSUwIwYDVQQKExxTRUNPTSBUcnVzdCBTeXN0ZW1zIENPLixMVEQuMSswKQYD -VQQDEyJTZWN1cml0eSBDb21tdW5pY2F0aW9uIEVDQyBSb290Q0ExMB4XDTE2MDYx -NjA1MTUyOFoXDTM4MDExODA1MTUyOFowYTELMAkGA1UEBhMCSlAxJTAjBgNVBAoT -HFNFQ09NIFRydXN0IFN5c3RlbXMgQ08uLExURC4xKzApBgNVBAMTIlNlY3VyaXR5 -IENvbW11bmljYXRpb24gRUNDIFJvb3RDQTEwdjAQBgcqhkjOPQIBBgUrgQQAIgNi -AASkpW9gAwPDvTH00xecK4R1rOX9PVdu12O/5gSJko6BnOPpR27KkBLIE+Cnnfdl -dB9sELLo5OnvbYUymUSxXv3MdhDYW72ixvnWQuRXdtyQwjWpS4g8EkdtXP9JTxpK -ULGjQjBAMB0GA1UdDgQWBBSGHOf+LaVKiwj+KBH6vqNm+GBZLzAOBgNVHQ8BAf8E -BAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjAVXUI9/Lbu -9zuxNuie9sRGKEkz0FhDKmMpzE2xtHqiuQ04pV1IKv3LsnNdo4gIxwwCMQDAqy0O -be0YottT6SXbVQjgUMzfRGEWgqtJsLKB7HOHeLRMsmIbEvoWTSVLY70eN9k= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA1 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA1" -# Serial: 113562791157148395269083148143378328608 -# MD5 Fingerprint: 42:32:99:76:43:33:36:24:35:07:82:9b:28:f9:d0:90 -# SHA1 Fingerprint: d5:ec:8d:7b:4c:ba:79:f4:e7:e8:cb:9d:6b:ae:77:83:10:03:21:6a -# SHA256 Fingerprint: f3:89:6f:88:fe:7c:0a:88:27:66:a7:fa:6a:d2:74:9f:b5:7a:7f:3e:98:fb:76:9c:1f:a7:b0:9c:2c:44:d5:ae ------BEGIN CERTIFICATE----- -MIIFdDCCA1ygAwIBAgIQVW9l47TZkGobCdFsPsBsIDANBgkqhkiG9w0BAQsFADBU -MQswCQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRI -T1JJVFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0ExMB4XDTE5MTIxOTAz -MTYxN1oXDTQ0MTIxMjAzMTYxN1owVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJF -SUpJTkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2Jh -bCBSb290IENBMTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAPFmCL3Z -xRVhy4QEQaVpN3cdwbB7+sN3SJATcmTRuHyQNZ0YeYjjlwE8R4HyDqKYDZ4/N+AZ -spDyRhySsTphzvq3Rp4Dhtczbu33RYx2N95ulpH3134rhxfVizXuhJFyV9xgw8O5 -58dnJCNPYwpj9mZ9S1WnP3hkSWkSl+BMDdMJoDIwOvqfwPKcxRIqLhy1BDPapDgR -at7GGPZHOiJBhyL8xIkoVNiMpTAK+BcWyqw3/XmnkRd4OJmtWO2y3syJfQOcs4ll -5+M7sSKGjwZteAf9kRJ/sGsciQ35uMt0WwfCyPQ10WRjeulumijWML3mG90Vr4Tq -nMfK9Q7q8l0ph49pczm+LiRvRSGsxdRpJQaDrXpIhRMsDQa4bHlW/KNnMoH1V6XK -V0Jp6VwkYe/iMBhORJhVb3rCk9gZtt58R4oRTklH2yiUAguUSiz5EtBP6DF+bHq/ -pj+bOT0CFqMYs2esWz8sgytnOYFcuX6U1WTdno9uruh8W7TXakdI136z1C2OVnZO -z2nxbkRs1CTqjSShGL+9V/6pmTW12xB3uD1IutbB5/EjPtffhZ0nPNRAvQoMvfXn -jSXWgXSHRtQpdaJCbPdzied9v3pKH9MiyRVVz99vfFXQpIsHETdfg6YmV6YBW37+ -WGgHqel62bno/1Afq8K0wM7o6v0PvY1NuLxxAgMBAAGjQjBAMB0GA1UdDgQWBBTF -7+3M2I0hxkjk49cULqcWk+WYATAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE -AwIBBjANBgkqhkiG9w0BAQsFAAOCAgEAUoKsITQfI/Ki2Pm4rzc2IInRNwPWaZ+4 -YRC6ojGYWUfo0Q0lHhVBDOAqVdVXUsv45Mdpox1NcQJeXyFFYEhcCY5JEMEE3Kli -awLwQ8hOnThJdMkycFRtwUf8jrQ2ntScvd0g1lPJGKm1Vrl2i5VnZu69mP6u775u -+2D2/VnGKhs/I0qUJDAnyIm860Qkmss9vk/Ves6OF8tiwdneHg56/0OGNFK8YT88 -X7vZdrRTvJez/opMEi4r89fO4aL/3Xtw+zuhTaRjAv04l5U/BXCga99igUOLtFkN -SoxUnMW7gZ/NfaXvCyUeOiDbHPwfmGcCCtRzRBPbUYQaVQNW4AB+dAb/OMRyHdOo -P2gxXdMJxy6MW2Pg6Nwe0uxhHvLe5e/2mXZgLR6UcnHGCyoyx5JO1UbXHfmpGQrI -+pXObSOYqgs4rZpWDW+N8TEAiMEXnM0ZNjX+VVOg4DwzX5Ze4jLp3zO7Bkqp2IRz -znfSxqxx4VyjHQy7Ct9f4qNx2No3WqB4K/TUfet27fJhcKVlmtOJNBir+3I+17Q9 
-eVzYH6Eze9mCUAyTF6ps3MKCuwJXNq+YJyo5UOGwifUll35HaBC07HPKs5fRJNz2 -YqAo07WjuGS3iGJCz51TzZm+ZGiPTx4SSPfSKcOYKMryMguTjClPPGAyzQWWYezy -r/6zcCwupvI= ------END CERTIFICATE----- - -# Issuer: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Subject: CN=BJCA Global Root CA2 O=BEIJING CERTIFICATE AUTHORITY -# Label: "BJCA Global Root CA2" -# Serial: 58605626836079930195615843123109055211 -# MD5 Fingerprint: 5e:0a:f6:47:5f:a6:14:e8:11:01:95:3f:4d:01:eb:3c -# SHA1 Fingerprint: f4:27:86:eb:6e:b8:6d:88:31:67:02:fb:ba:66:a4:53:00:aa:7a:a6 -# SHA256 Fingerprint: 57:4d:f6:93:1e:27:80:39:66:7b:72:0a:fd:c1:60:0f:c2:7e:b6:6d:d3:09:29:79:fb:73:85:64:87:21:28:82 ------BEGIN CERTIFICATE----- -MIICJTCCAaugAwIBAgIQLBcIfWQqwP6FGFkGz7RK6zAKBggqhkjOPQQDAzBUMQsw -CQYDVQQGEwJDTjEmMCQGA1UECgwdQkVJSklORyBDRVJUSUZJQ0FURSBBVVRIT1JJ -VFkxHTAbBgNVBAMMFEJKQ0EgR2xvYmFsIFJvb3QgQ0EyMB4XDTE5MTIxOTAzMTgy -MVoXDTQ0MTIxMjAzMTgyMVowVDELMAkGA1UEBhMCQ04xJjAkBgNVBAoMHUJFSUpJ -TkcgQ0VSVElGSUNBVEUgQVVUSE9SSVRZMR0wGwYDVQQDDBRCSkNBIEdsb2JhbCBS -b290IENBMjB2MBAGByqGSM49AgEGBSuBBAAiA2IABJ3LgJGNU2e1uVCxA/jlSR9B -IgmwUVJY1is0j8USRhTFiy8shP8sbqjV8QnjAyEUxEM9fMEsxEtqSs3ph+B99iK+ -+kpRuDCK/eHeGBIK9ke35xe/J4rUQUyWPGCWwf0VHKNCMEAwHQYDVR0OBBYEFNJK -sVF/BvDRgh9Obl+rg/xI1LCRMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMAoGCCqGSM49BAMDA2gAMGUCMBq8W9f+qdJUDkpd0m2xQNz0Q9XSSpkZElaA -94M04TVOSG0ED1cxMDAtsaqdAzjbBgIxAMvMh1PLet8gUXOQwKhbYdDFUDn9hf7B -43j4ptZLvZuHjw/l1lOWqzzIQNph91Oj9w== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root E46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root E46" -# Serial: 88989738453351742415770396670917916916 -# MD5 Fingerprint: 28:23:f8:b2:98:5c:37:16:3b:3e:46:13:4e:b0:b3:01 -# SHA1 Fingerprint: ec:8a:39:6c:40:f0:2e:bc:42:75:d4:9f:ab:1c:1a:5b:67:be:d2:9a -# SHA256 Fingerprint: c9:0f:26:f0:fb:1b:40:18:b2:22:27:51:9b:5c:a2:b5:3e:2c:a5:b3:be:5c:f1:8e:fe:1b:ef:47:38:0c:53:83 ------BEGIN CERTIFICATE----- -MIICOjCCAcGgAwIBAgIQQvLM2htpN0RfFf51KBC49DAKBggqhkjOPQQDAzBfMQsw -CQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1T -ZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwHhcN -MjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEYMBYG -A1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1YmxpYyBT -ZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBFNDYwdjAQBgcqhkjOPQIBBgUrgQQA -IgNiAAR2+pmpbiDt+dd34wc7qNs9Xzjoq1WmVk/WSOrsfy2qw7LFeeyZYX8QeccC -WvkEN/U0NSt3zn8gj1KjAIns1aeibVvjS5KToID1AZTc8GgHHs3u/iVStSBDHBv+ -6xnOQ6OjQjBAMB0GA1UdDgQWBBTRItpMWfFLXyY4qp3W7usNw/upYTAOBgNVHQ8B -Af8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAKBggqhkjOPQQDAwNnADBkAjAn7qRa -qCG76UeXlImldCBteU/IvZNeWBj7LRoAasm4PdCkT0RHlAFWovgzJQxC36oCMB3q -4S6ILuH5px0CMk7yn2xVdOOurvulGu7t0vzCAxHrRVxgED1cf5kDW21USAGKcw== ------END CERTIFICATE----- - -# Issuer: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Subject: CN=Sectigo Public Server Authentication Root R46 O=Sectigo Limited -# Label: "Sectigo Public Server Authentication Root R46" -# Serial: 156256931880233212765902055439220583700 -# MD5 Fingerprint: 32:10:09:52:00:d5:7e:6c:43:df:15:c0:b1:16:93:e5 -# SHA1 Fingerprint: ad:98:f9:f3:e4:7d:75:3b:65:d4:82:b3:a4:52:17:bb:6e:f5:e4:38 -# SHA256 Fingerprint: 7b:b6:47:a6:2a:ee:ac:88:bf:25:7a:a5:22:d0:1f:fe:a3:95:e0:ab:45:c7:3f:93:f6:56:54:ec:38:f2:5a:06 ------BEGIN CERTIFICATE----- -MIIFijCCA3KgAwIBAgIQdY39i658BwD6qSWn4cetFDANBgkqhkiG9w0BAQwFADBf 
-MQswCQYDVQQGEwJHQjEYMBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQD -Ey1TZWN0aWdvIFB1YmxpYyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYw -HhcNMjEwMzIyMDAwMDAwWhcNNDYwMzIxMjM1OTU5WjBfMQswCQYDVQQGEwJHQjEY -MBYGA1UEChMPU2VjdGlnbyBMaW1pdGVkMTYwNAYDVQQDEy1TZWN0aWdvIFB1Ymxp -YyBTZXJ2ZXIgQXV0aGVudGljYXRpb24gUm9vdCBSNDYwggIiMA0GCSqGSIb3DQEB -AQUAA4ICDwAwggIKAoICAQCTvtU2UnXYASOgHEdCSe5jtrch/cSV1UgrJnwUUxDa -ef0rty2k1Cz66jLdScK5vQ9IPXtamFSvnl0xdE8H/FAh3aTPaE8bEmNtJZlMKpnz -SDBh+oF8HqcIStw+KxwfGExxqjWMrfhu6DtK2eWUAtaJhBOqbchPM8xQljeSM9xf -iOefVNlI8JhD1mb9nxc4Q8UBUQvX4yMPFF1bFOdLvt30yNoDN9HWOaEhUTCDsG3X -ME6WW5HwcCSrv0WBZEMNvSE6Lzzpng3LILVCJ8zab5vuZDCQOc2TZYEhMbUjUDM3 -IuM47fgxMMxF/mL50V0yeUKH32rMVhlATc6qu/m1dkmU8Sf4kaWD5QazYw6A3OAS -VYCmO2a0OYctyPDQ0RTp5A1NDvZdV3LFOxxHVp3i1fuBYYzMTYCQNFu31xR13NgE -SJ/AwSiItOkcyqex8Va3e0lMWeUgFaiEAin6OJRpmkkGj80feRQXEgyDet4fsZfu -+Zd4KKTIRJLpfSYFplhym3kT2BFfrsU4YjRosoYwjviQYZ4ybPUHNs2iTG7sijbt -8uaZFURww3y8nDnAtOFr94MlI1fZEoDlSfB1D++N6xybVCi0ITz8fAr/73trdf+L -HaAZBav6+CuBQug4urv7qv094PPK306Xlynt8xhW6aWWrL3DkJiy4Pmi1KZHQ3xt -zwIDAQABo0IwQDAdBgNVHQ4EFgQUVnNYZJX5khqwEioEYnmhQBWIIUkwDgYDVR0P -AQH/BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAC9c -mTz8Bl6MlC5w6tIyMY208FHVvArzZJ8HXtXBc2hkeqK5Duj5XYUtqDdFqij0lgVQ -YKlJfp/imTYpE0RHap1VIDzYm/EDMrraQKFz6oOht0SmDpkBm+S8f74TlH7Kph52 -gDY9hAaLMyZlbcp+nv4fjFg4exqDsQ+8FxG75gbMY/qB8oFM2gsQa6H61SilzwZA -Fv97fRheORKkU55+MkIQpiGRqRxOF3yEvJ+M0ejf5lG5Nkc/kLnHvALcWxxPDkjB -JYOcCj+esQMzEhonrPcibCTRAUH4WAP+JWgiH5paPHxsnnVI84HxZmduTILA7rpX -DhjvLpr3Etiga+kFpaHpaPi8TD8SHkXoUsCjvxInebnMMTzD9joiFgOgyY9mpFui -TdaBJQbpdqQACj7LzTWb4OE4y2BThihCQRxEV+ioratF4yUQvNs+ZUH7G6aXD+u5 -dHn5HrwdVw1Hr8Mvn4dGp+smWg9WY7ViYG4A++MnESLn/pmPNPW56MORcr3Ywx65 -LvKRRFHQV80MNNVIIb/bE/FmJUNS0nAiNs2fxBx1IK1jcmMGDw4nztJqDby1ORrp -0XZ60Vzk50lJLVU3aPAaOpg+VBeHVOmmJ1CJeyAvP/+/oYtKR5j/K3tJPsMpRmAY -QqszKbrAKbkTidOIijlBO8n9pu0f9GBj39ItVQGL ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS RSA Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS RSA Root CA 2022" -# Serial: 148535279242832292258835760425842727825 -# MD5 Fingerprint: d8:4e:c6:59:30:d8:fe:a0:d6:7a:5a:2c:2c:69:78:da -# SHA1 Fingerprint: ec:2c:83:40:72:af:26:95:10:ff:0e:f2:03:ee:31:70:f6:78:9d:ca -# SHA256 Fingerprint: 8f:af:7d:2e:2c:b4:70:9b:b8:e0:b3:36:66:bf:75:a5:dd:45:b5:de:48:0f:8e:a8:d4:bf:e6:be:bc:17:f2:ed ------BEGIN CERTIFICATE----- -MIIFiTCCA3GgAwIBAgIQb77arXO9CEDii02+1PdbkTANBgkqhkiG9w0BAQsFADBO -MQswCQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQD -DBxTU0wuY29tIFRMUyBSU0EgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzQyMloX -DTQ2MDgxOTE2MzQyMVowTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jw -b3JhdGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgUlNBIFJvb3QgQ0EgMjAyMjCC -AiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBANCkCXJPQIgSYT41I57u9nTP -L3tYPc48DRAokC+X94xI2KDYJbFMsBFMF3NQ0CJKY7uB0ylu1bUJPiYYf7ISf5OY -t6/wNr/y7hienDtSxUcZXXTzZGbVXcdotL8bHAajvI9AI7YexoS9UcQbOcGV0ins -S657Lb85/bRi3pZ7QcacoOAGcvvwB5cJOYF0r/c0WRFXCsJbwST0MXMwgsadugL3 -PnxEX4MN8/HdIGkWCVDi1FW24IBydm5MR7d1VVm0U3TZlMZBrViKMWYPHqIbKUBO -L9975hYsLfy/7PO0+r4Y9ptJ1O4Fbtk085zx7AGL0SDGD6C1vBdOSHtRwvzpXGk3 -R2azaPgVKPC506QVzFpPulJwoxJF3ca6TvvC0PeoUidtbnm1jPx7jMEWTO6Af77w -dr5BUxIzrlo4QqvXDz5BjXYHMtWrifZOZ9mxQnUjbvPNQrL8VfVThxc7wDNY8VLS -+YCk8OjwO4s4zKTGkH8PnP2L0aPP2oOnaclQNtVcBdIKQXTbYxE3waWglksejBYS -d66UNHsef8JmAOSqg+qKkK3ONkRN0VHpvB/zagX9wHQfJRlAUW7qglFA35u5CCoG -AtUjHBPW6dvbxrB6y3snm/vg1UYk7RBLY0ulBY+6uB0rpvqR4pJSvezrZ5dtmi2f -gTIFZzL7SAg/2SW4BCUvAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0j 
-BBgwFoAU+y437uOEeicuzRk1sTN8/9REQrkwHQYDVR0OBBYEFPsuN+7jhHonLs0Z -NbEzfP/UREK5MA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQsFAAOCAgEAjYlt -hEUY8U+zoO9opMAdrDC8Z2awms22qyIZZtM7QbUQnRC6cm4pJCAcAZli05bg4vsM -QtfhWsSWTVTNj8pDU/0quOr4ZcoBwq1gaAafORpR2eCNJvkLTqVTJXojpBzOCBvf -R4iyrT7gJ4eLSYwfqUdYe5byiB0YrrPRpgqU+tvT5TgKa3kSM/tKWTcWQA673vWJ -DPFs0/dRa1419dvAJuoSc06pkZCmF8NsLzjUo3KUQyxi4U5cMj29TH0ZR6LDSeeW -P4+a0zvkEdiLA9z2tmBVGKaBUfPhqBVq6+AL8BQx1rmMRTqoENjwuSfr98t67wVy -lrXEj5ZzxOhWc5y8aVFjvO9nHEMaX3cZHxj4HCUp+UmZKbaSPaKDN7EgkaibMOlq -bLQjk2UEqxHzDh1TJElTHaE/nUiSEeJ9DU/1172iWD54nR4fK/4huxoTtrEoZP2w -AgDHbICivRZQIA9ygV/MlP+7mea6kMvq+cYMwq7FGc4zoWtcu358NFcXrfA/rs3q -r5nsLFR+jM4uElZI7xc7P0peYNLcdDa8pUNjyw9bowJWCZ4kLOGGgYz+qxcs+sji -Mho6/4UIyYOf8kpIEFR3N+2ivEC+5BB09+Rbu7nzifmPQdjH5FCQNYA+HLhNkNPU -98OwoX6EyneSMSy4kLGCenROmxMmtNVQZlR4rmA= ------END CERTIFICATE----- - -# Issuer: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Subject: CN=SSL.com TLS ECC Root CA 2022 O=SSL Corporation -# Label: "SSL.com TLS ECC Root CA 2022" -# Serial: 26605119622390491762507526719404364228 -# MD5 Fingerprint: 99:d7:5c:f1:51:36:cc:e9:ce:d9:19:2e:77:71:56:c5 -# SHA1 Fingerprint: 9f:5f:d9:1a:54:6d:f5:0c:71:f0:ee:7a:bd:17:49:98:84:73:e2:39 -# SHA256 Fingerprint: c3:2f:fd:9f:46:f9:36:d1:6c:36:73:99:09:59:43:4b:9a:d6:0a:af:bb:9e:7c:f3:36:54:f1:44:cc:1b:a1:43 ------BEGIN CERTIFICATE----- -MIICOjCCAcCgAwIBAgIQFAP1q/s3ixdAW+JDsqXRxDAKBggqhkjOPQQDAzBOMQsw -CQYDVQQGEwJVUzEYMBYGA1UECgwPU1NMIENvcnBvcmF0aW9uMSUwIwYDVQQDDBxT -U0wuY29tIFRMUyBFQ0MgUm9vdCBDQSAyMDIyMB4XDTIyMDgyNTE2MzM0OFoXDTQ2 -MDgxOTE2MzM0N1owTjELMAkGA1UEBhMCVVMxGDAWBgNVBAoMD1NTTCBDb3Jwb3Jh -dGlvbjElMCMGA1UEAwwcU1NMLmNvbSBUTFMgRUNDIFJvb3QgQ0EgMjAyMjB2MBAG -ByqGSM49AgEGBSuBBAAiA2IABEUpNXP6wrgjzhR9qLFNoFs27iosU8NgCTWyJGYm -acCzldZdkkAZDsalE3D07xJRKF3nzL35PIXBz5SQySvOkkJYWWf9lCcQZIxPBLFN -SeR7T5v15wj4A4j3p8OSSxlUgaNjMGEwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSME -GDAWgBSJjy+j6CugFFR781a4Jl9nOAuc0DAdBgNVHQ4EFgQUiY8vo+groBRUe/NW -uCZfZzgLnNAwDgYDVR0PAQH/BAQDAgGGMAoGCCqGSM49BAMDA2gAMGUCMFXjIlbp -15IkWE8elDIPDAI2wv2sdDJO4fscgIijzPvX6yv/N33w7deedWo1dlJF4AIxAMeN -b0Igj762TVntd00pxCAgRWSGOlDGxK0tk/UYfXLtqc/ErFc2KAhl3zx5Zn6g6g== ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Subject: CN=Atos TrustedRoot Root CA ECC TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA ECC TLS 2021" -# Serial: 81873346711060652204712539181482831616 -# MD5 Fingerprint: 16:9f:ad:f1:70:ad:79:d6:ed:29:b4:d1:c5:79:70:a8 -# SHA1 Fingerprint: 9e:bc:75:10:42:b3:02:f3:81:f4:f7:30:62:d4:8f:c3:a7:51:b2:dd -# SHA256 Fingerprint: b2:fa:e5:3e:14:cc:d7:ab:92:12:06:47:01:ae:27:9c:1d:89:88:fa:cb:77:5f:a8:a0:08:91:4e:66:39:88:a8 ------BEGIN CERTIFICATE----- -MIICFTCCAZugAwIBAgIQPZg7pmY9kGP3fiZXOATvADAKBggqhkjOPQQDAzBMMS4w -LAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgRUNDIFRMUyAyMDIxMQ0w -CwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTI2MjNaFw00MTA0 -MTcwOTI2MjJaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBDQSBF -Q0MgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMHYwEAYHKoZI -zj0CAQYFK4EEACIDYgAEloZYKDcKZ9Cg3iQZGeHkBQcfl+3oZIK59sRxUM6KDP/X -tXa7oWyTbIOiaG6l2b4siJVBzV3dscqDY4PMwL502eCdpO5KTlbgmClBk1IQ1SQ4 -AjJn8ZQSb+/Xxd4u/RmAo0IwQDAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBR2 -KCXWfeBmmnoJsmo7jjPXNtNPojAOBgNVHQ8BAf8EBAMCAYYwCgYIKoZIzj0EAwMD -aAAwZQIwW5kp85wxtolrbNa9d+F851F+uDrNozZffPc8dz7kUK2o59JZDCaOMDtu -CCrCp1rIAjEAmeMM56PDr9NJLkaCI2ZdyQAUEv049OGYa3cpetskz2VAv9LcjBHo -9H1/IISpQuQo ------END CERTIFICATE----- - -# Issuer: CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Subject: 
CN=Atos TrustedRoot Root CA RSA TLS 2021 O=Atos -# Label: "Atos TrustedRoot Root CA RSA TLS 2021" -# Serial: 111436099570196163832749341232207667876 -# MD5 Fingerprint: d4:d3:46:b8:9a:c0:9c:76:5d:9e:3a:c3:b9:99:31:d2 -# SHA1 Fingerprint: 18:52:3b:0d:06:37:e4:d6:3a:df:23:e4:98:fb:5b:16:fb:86:74:48 -# SHA256 Fingerprint: 81:a9:08:8e:a5:9f:b3:64:c5:48:a6:f8:55:59:09:9b:6f:04:05:ef:bf:18:e5:32:4e:c9:f4:57:ba:00:11:2f ------BEGIN CERTIFICATE----- -MIIFZDCCA0ygAwIBAgIQU9XP5hmTC/srBRLYwiqipDANBgkqhkiG9w0BAQwFADBM -MS4wLAYDVQQDDCVBdG9zIFRydXN0ZWRSb290IFJvb3QgQ0EgUlNBIFRMUyAyMDIx -MQ0wCwYDVQQKDARBdG9zMQswCQYDVQQGEwJERTAeFw0yMTA0MjIwOTIxMTBaFw00 -MTA0MTcwOTIxMDlaMEwxLjAsBgNVBAMMJUF0b3MgVHJ1c3RlZFJvb3QgUm9vdCBD -QSBSU0EgVExTIDIwMjExDTALBgNVBAoMBEF0b3MxCzAJBgNVBAYTAkRFMIICIjAN -BgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAtoAOxHm9BYx9sKOdTSJNy/BBl01Z -4NH+VoyX8te9j2y3I49f1cTYQcvyAh5x5en2XssIKl4w8i1mx4QbZFc4nXUtVsYv -Ye+W/CBGvevUez8/fEc4BKkbqlLfEzfTFRVOvV98r61jx3ncCHvVoOX3W3WsgFWZ -kmGbzSoXfduP9LVq6hdKZChmFSlsAvFr1bqjM9xaZ6cF4r9lthawEO3NUDPJcFDs -GY6wx/J0W2tExn2WuZgIWWbeKQGb9Cpt0xU6kGpn8bRrZtkh68rZYnxGEFzedUln -nkL5/nWpo63/dgpnQOPF943HhZpZnmKaau1Fh5hnstVKPNe0OwANwI8f4UDErmwh -3El+fsqyjW22v5MvoVw+j8rtgI5Y4dtXz4U2OLJxpAmMkokIiEjxQGMYsluMWuPD -0xeqqxmjLBvk1cbiZnrXghmmOxYsL3GHX0WelXOTwkKBIROW1527k2gV+p2kHYzy -geBYBr3JtuP2iV2J+axEoctr+hbxx1A9JNr3w+SH1VbxT5Aw+kUJWdo0zuATHAR8 -ANSbhqRAvNncTFd+rrcztl524WWLZt+NyteYr842mIycg5kDcPOvdO3GDjbnvezB -c6eUWsuSZIKmAMFwoW4sKeFYV+xafJlrJaSQOoD0IJ2azsct+bJLKZWD6TWNp0lI -pw9MGZHQ9b8Q4HECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQU -dEmZ0f+0emhFdcN+tNzMzjkz2ggwDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB -DAUAA4ICAQAjQ1MkYlxt/T7Cz1UAbMVWiLkO3TriJQ2VSpfKgInuKs1l+NsW4AmS -4BjHeJi78+xCUvuppILXTdiK/ORO/auQxDh1MoSf/7OwKwIzNsAQkG8dnK/haZPs -o0UvFJ/1TCplQ3IM98P4lYsU84UgYt1UU90s3BiVaU+DR3BAM1h3Egyi61IxHkzJ -qM7F78PRreBrAwA0JrRUITWXAdxfG/F851X6LWh3e9NpzNMOa7pNdkTWwhWaJuyw -xfW70Xp0wmzNxbVe9kzmWy2B27O3Opee7c9GslA9hGCZcbUztVdF5kJHdWoOsAgM -rr3e97sPWD2PAzHoPYJQyi9eDF20l74gNAf0xBLh7tew2VktafcxBPTy+av5EzH4 -AXcOPUIjJsyacmdRIXrMPIWo6iFqO9taPKU0nprALN+AnCng33eU0aKAQv9qTFsR -0PXNor6uzFFcw9VUewyu1rkGd4Di7wcaaMxZUa1+XGdrudviB0JbuAEFWDlN5LuY -o7Ey7Nmj1m+UI/87tyll5gfp77YZ6ufCOB0yiJA8EytuzO+rdwY0d4RPcuSBhPm5 -dDTedk+SKlOxJTnbPP/lPqYO5Wue/9vsL3SD3460s6neFE3/MaNFcyT6lSnMEpcE -oji2jbDwN/zIIX8/syQbPYtuzE2wFg2WHYMfRsCbvUOZ58SWLs5fyQ== ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G3 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G3" -# Serial: 576386314500428537169965010905813481816650257167 -# MD5 Fingerprint: 30:42:1b:b7:bb:81:75:35:e4:16:4f:53:d2:94:de:04 -# SHA1 Fingerprint: 63:cf:b6:c1:27:2b:56:e4:88:8e:1c:23:9a:b6:2e:81:47:24:c3:c7 -# SHA256 Fingerprint: e0:d3:22:6a:eb:11:63:c2:e4:8f:f9:be:3b:50:b4:c6:43:1b:e7:bb:1e:ac:c5:c3:6b:5d:5e:c5:09:03:9a:08 ------BEGIN CERTIFICATE----- -MIIFpTCCA42gAwIBAgIUZPYOZXdhaqs7tOqFhLuxibhxkw8wDQYJKoZIhvcNAQEM -BQAwWjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dp -ZXMsIEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHMzAe -Fw0yMTA1MjAwMjEwMTlaFw00NjA1MTkwMjEwMTlaMFoxCzAJBgNVBAYTAkNOMSUw -IwYDVQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtU -cnVzdEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzMwggIiMA0GCSqGSIb3DQEBAQUAA4IC -DwAwggIKAoICAQDAMYJhkuSUGwoqZdC+BqmHO1ES6nBBruL7dOoKjbmzTNyPtxNS -T1QY4SxzlZHFZjtqz6xjbYdT8PfxObegQ2OwxANdV6nnRM7EoYNl9lA+sX4WuDqK -AtCWHwDNBSHvBm3dIZwZQ0WhxeiAysKtQGIXBsaqvPPW5vxQfmZCHzyLpnl5hkA1 -nyDvP+uLRx+PjsXUjrYsyUQE49RDdT/VP68czH5GX6zfZBCK70bwkPAPLfSIC7Ep -qq+FqklYqL9joDiR5rPmd2jE+SoZhLsO4fWvieylL1AgdB4SQXMeJNnKziyhWTXA -yB1GJ2Faj/lN03J5Zh6fFZAhLf3ti1ZwA0pJPn9pMRJpxx5cynoTi+jm9WAPzJMs -hH/x/Gr8m0ed262IPfN2dTPXS6TIi/n1Q1hPy8gDVI+lhXgEGvNz8teHHUGf59gX -zhqcD0r83ERoVGjiQTz+LISGNzzNPy+i2+f3VANfWdP3kXjHi3dqFuVJhZBFcnAv -kV34PmVACxmZySYgWmjBNb9Pp1Hx2BErW+Canig7CjoKH8GB5S7wprlppYiU5msT -f9FkPz2ccEblooV7WIQn3MSAPmeamseaMQ4w7OYXQJXZRe0Blqq/DPNL0WP3E1jA -uPP6Z92bfW1K/zJMtSU7/xxnD4UiWQWRkUF3gdCFTIcQcf+eQxuulXUtgQIDAQAB -o2MwYTAPBgNVHRMBAf8EBTADAQH/MB8GA1UdIwQYMBaAFEDk5PIj7zjKsK5Xf/Ih -MBY027ySMB0GA1UdDgQWBBRA5OTyI+84yrCuV3/yITAWNNu8kjAOBgNVHQ8BAf8E -BAMCAQYwDQYJKoZIhvcNAQEMBQADggIBACY7UeFNOPMyGLS0XuFlXsSUT9SnYaP4 -wM8zAQLpw6o1D/GUE3d3NZ4tVlFEbuHGLige/9rsR82XRBf34EzC4Xx8MnpmyFq2 -XFNFV1pF1AWZLy4jVe5jaN/TG3inEpQGAHUNcoTpLrxaatXeL1nHo+zSh2bbt1S1 -JKv0Q3jbSwTEb93mPmY+KfJLaHEih6D4sTNjduMNhXJEIlU/HHzp/LgV6FL6qj6j -ITk1dImmasI5+njPtqzn59ZW/yOSLlALqbUHM/Q4X6RJpstlcHboCoWASzY9M/eV -VHUl2qzEc4Jl6VL1XP04lQJqaTDFHApXB64ipCz5xUG3uOyfT0gA+QEEVcys+TIx -xHWVBqB/0Y0n3bOppHKH/lmLmnp0Ft0WpWIp6zqW3IunaFnT63eROfjXy9mPX1on -AX1daBli2MjN9LdyR75bl87yraKZk62Uy5P2EgmVtqvXO9A/EcswFi55gORngS1d -7XB4tmBZrOFdRWOPyN9yaFvqHbgB8X7754qz41SgOAngPN5C8sLtLpvzHzW2Ntjj -gKGLzZlkD8Kqq7HK9W+eQ42EVJmzbsASZthwEPEGNTNDqJwuuhQxzhB/HIbjj9LV -+Hfsm6vxL2PZQl/gZ4FkkfGXL/xuJvYz+NO1+MRiqzFRJQJ6+N1rZdVtTTDIZbpo -FGWsJwt0ivKH ------END CERTIFICATE----- - -# Issuer: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. -# Subject: CN=TrustAsia Global Root CA G4 O=TrustAsia Technologies, Inc. 
-# Label: "TrustAsia Global Root CA G4" -# Serial: 451799571007117016466790293371524403291602933463 -# MD5 Fingerprint: 54:dd:b2:d7:5f:d8:3e:ed:7c:e0:0b:2e:cc:ed:eb:eb -# SHA1 Fingerprint: 57:73:a5:61:5d:80:b2:e6:ac:38:82:fc:68:07:31:ac:9f:b5:92:5a -# SHA256 Fingerprint: be:4b:56:cb:50:56:c0:13:6a:52:6d:f4:44:50:8d:aa:36:a0:b5:4f:42:e4:ac:38:f7:2a:f4:70:e4:79:65:4c ------BEGIN CERTIFICATE----- -MIICVTCCAdygAwIBAgIUTyNkuI6XY57GU4HBdk7LKnQV1tcwCgYIKoZIzj0EAwMw -WjELMAkGA1UEBhMCQ04xJTAjBgNVBAoMHFRydXN0QXNpYSBUZWNobm9sb2dpZXMs -IEluYy4xJDAiBgNVBAMMG1RydXN0QXNpYSBHbG9iYWwgUm9vdCBDQSBHNDAeFw0y -MTA1MjAwMjEwMjJaFw00NjA1MTkwMjEwMjJaMFoxCzAJBgNVBAYTAkNOMSUwIwYD -VQQKDBxUcnVzdEFzaWEgVGVjaG5vbG9naWVzLCBJbmMuMSQwIgYDVQQDDBtUcnVz -dEFzaWEgR2xvYmFsIFJvb3QgQ0EgRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAATx -s8045CVD5d4ZCbuBeaIVXxVjAd7Cq92zphtnS4CDr5nLrBfbK5bKfFJV4hrhPVbw -LxYI+hW8m7tH5j/uqOFMjPXTNvk4XatwmkcN4oFBButJ+bAp3TPsUKV/eSm4IJij -YzBhMA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUpbtKl86zK3+kMd6Xg1mD -pm9xy94wHQYDVR0OBBYEFKW7SpfOsyt/pDHel4NZg6ZvccveMA4GA1UdDwEB/wQE -AwIBBjAKBggqhkjOPQQDAwNnADBkAjBe8usGzEkxn0AAbbd+NvBNEU/zy4k6LHiR -UKNbwMp1JvK/kF0LgoxgKJ/GcJpo5PECMFxYDlZ2z1jD1xCMuo6u47xkdUfFVZDj -/bpV6wfEU6s3qe4hsiFbYI89MvHVI5TWWA== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-01 O=CommScope -# Label: "CommScope Public Trust ECC Root-01" -# Serial: 385011430473757362783587124273108818652468453534 -# MD5 Fingerprint: 3a:40:a7:fc:03:8c:9c:38:79:2f:3a:a2:6c:b6:0a:16 -# SHA1 Fingerprint: 07:86:c0:d8:dd:8e:c0:80:98:06:98:d0:58:7a:ef:de:a6:cc:a2:5d -# SHA256 Fingerprint: 11:43:7c:da:7b:b4:5e:41:36:5f:45:b3:9a:38:98:6b:0d:e0:0d:ef:34:8e:0c:7b:b0:87:36:33:80:0b:c3:8b ------BEGIN CERTIFICATE----- -MIICHTCCAaOgAwIBAgIUQ3CCd89NXTTxyq4yLzf39H91oJ4wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMTAeFw0yMTA0MjgxNzM1NDNa -Fw00NjA0MjgxNzM1NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDEw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAARLNumuV16ocNfQj3Rid8NeeqrltqLxeP0C -flfdkXmcbLlSiFS8LwS+uM32ENEp7LXQoMPwiXAZu1FlxUOcw5tjnSCDPgYLpkJE -hRGnSjot6dZoL0hOUysHP029uax3OVejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBSOB2LAUN3GGQYARnQE9/OufXVNMDAKBggq -hkjOPQQDAwNoADBlAjEAnDPfQeMjqEI2Jpc1XHvr20v4qotzVRVcrHgpD7oh2MSg -2NED3W3ROT3Ek2DS43KyAjB8xX6I01D1HiXo+k515liWpDVfG2XqYZpwI7UNo5uS -Um9poIyNStDuiw7LR47QjRE= ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Subject: CN=CommScope Public Trust ECC Root-02 O=CommScope -# Label: "CommScope Public Trust ECC Root-02" -# Serial: 234015080301808452132356021271193974922492992893 -# MD5 Fingerprint: 59:b0:44:d5:65:4d:b8:5c:55:19:92:02:b6:d1:94:b2 -# SHA1 Fingerprint: 3c:3f:ef:57:0f:fe:65:93:86:9e:a0:fe:b0:f6:ed:8e:d1:13:c7:e5 -# SHA256 Fingerprint: 2f:fb:7f:81:3b:bb:b3:c8:9a:b4:e8:16:2d:0f:16:d7:15:09:a8:30:cc:9d:73:c2:62:e5:14:08:75:d1:ad:4a ------BEGIN CERTIFICATE----- -MIICHDCCAaOgAwIBAgIUKP2ZYEFHpgE6yhR7H+/5aAiDXX0wCgYIKoZIzj0EAwMw -TjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwiQ29t -bVNjb3BlIFB1YmxpYyBUcnVzdCBFQ0MgUm9vdC0wMjAeFw0yMTA0MjgxNzQ0NTRa -Fw00NjA0MjgxNzQ0NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21tU2Nv -cGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgRUNDIFJvb3QtMDIw -djAQBgcqhkjOPQIBBgUrgQQAIgNiAAR4MIHoYx7l63FRD/cHB8o5mXxO1Q/MMDAL -j2aTPs+9xYa9+bG3tD60B8jzljHz7aRP+KNOjSkVWLjVb3/ubCK1sK9IRQq9qEmU 
-v4RDsNuESgMjGWdqb8FuvAY5N9GIIvejQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYD -VR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTmGHX/72DehKT1RsfeSlXjMjZ59TAKBggq -hkjOPQQDAwNnADBkAjAmc0l6tqvmSfR9Uj/UQQSugEODZXW5hYA4O9Zv5JOGq4/n -ich/m35rChJVYaoR4HkCMHfoMXGsPHED1oQmHhS48zs73u1Z/GtMMH9ZzkXpc2AV -mkzw5l4lIhVtwodZ0LKOag== ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-01 O=CommScope -# Label: "CommScope Public Trust RSA Root-01" -# Serial: 354030733275608256394402989253558293562031411421 -# MD5 Fingerprint: 0e:b4:15:bc:87:63:5d:5d:02:73:d4:26:38:68:73:d8 -# SHA1 Fingerprint: 6d:0a:5f:f7:b4:23:06:b4:85:b3:b7:97:64:fc:ac:75:f5:33:f2:93 -# SHA256 Fingerprint: 02:bd:f9:6e:2a:45:dd:9b:f1:8f:c7:e1:db:df:21:a0:37:9b:a3:c9:c2:61:03:44:cf:d8:d6:06:fe:c1:ed:81 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUPgNJgXUWdDGOTKvVxZAplsU5EN0wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMTAeFw0yMTA0MjgxNjQ1 -NTRaFw00NjA0MjgxNjQ1NTNaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDEwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQCwSGWjDR1C45FtnYSk -YZYSwu3D2iM0GXb26v1VWvZVAVMP8syMl0+5UMuzAURWlv2bKOx7dAvnQmtVzslh -suitQDy6uUEKBU8bJoWPQ7VAtYXR1HHcg0Hz9kXHgKKEUJdGzqAMxGBWBB0HW0al -DrJLpA6lfO741GIDuZNqihS4cPgugkY4Iw50x2tBt9Apo52AsH53k2NC+zSDO3Oj -WiE260f6GBfZumbCk6SP/F2krfxQapWsvCQz0b2If4b19bJzKo98rwjyGpg/qYFl -P8GMicWWMJoKz/TUyDTtnS+8jTiGU+6Xn6myY5QXjQ/cZip8UlF1y5mO6D1cv547 -KI2DAg+pn3LiLCuz3GaXAEDQpFSOm117RTYm1nJD68/A6g3czhLmfTifBSeolz7p -UcZsBSjBAg/pGG3svZwG1KdJ9FQFa2ww8esD1eo9anbCyxooSU1/ZOD6K9pzg4H/ -kQO9lLvkuI6cMmPNn7togbGEW682v3fuHX/3SZtS7NJ3Wn2RnU3COS3kuoL4b/JO -Hg9O5j9ZpSPcPYeoKFgo0fEbNttPxP/hjFtyjMcmAyejOQoBqsCyMWCDIqFPEgkB -Ea801M/XrmLTBQe0MXXgDW1XT2mH+VepuhX2yFJtocucH+X8eKg1mp9BFM6ltM6U -CBwJrVbl2rZJmkrqYxhTnCwuwwIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUN12mmnQywsL5x6YVEFm45P3luG0wDQYJ -KoZIhvcNAQELBQADggIBAK+nz97/4L1CjU3lIpbfaOp9TSp90K09FlxD533Ahuh6 -NWPxzIHIxgvoLlI1pKZJkGNRrDSsBTtXAOnTYtPZKdVUvhwQkZyybf5Z/Xn36lbQ -nmhUQo8mUuJM3y+Xpi/SB5io82BdS5pYV4jvguX6r2yBS5KPQJqTRlnLX3gWsWc+ -QgvfKNmwrZggvkN80V4aCRckjXtdlemrwWCrWxhkgPut4AZ9HcpZuPN4KWfGVh2v -trV0KnahP/t1MJ+UXjulYPPLXAziDslg+MkfFoom3ecnf+slpoq9uC02EJqxWE2a -aE9gVOX2RhOOiKy8IUISrcZKiX2bwdgt6ZYD9KJ0DLwAHb/WNyVntHKLr4W96ioD -j8z7PEQkguIBpQtZtjSNMgsSDesnwv1B10A8ckYpwIzqug/xBpMu95yo9GA+o/E4 -Xo4TwbM6l4c/ksp4qRyv0LAbJh6+cOx69TOY6lz/KwsETkPdY34Op054A5U+1C0w -lREQKC6/oAI+/15Z0wUOlV9TRe9rh9VIzRamloPh37MG88EU26fsHItdkJANclHn -YfkUyq+Dj7+vsQpZXdxc1+SWrVtgHdqul7I52Qb1dgAT+GhMIbA1xNxVssnBQVoc -icCMb3SgazNNtQEo/a2tiRc7ppqEvOuM6sRxJKi6KfkIsidWNTJf6jn7MZrVGczw ------END CERTIFICATE----- - -# Issuer: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Subject: CN=CommScope Public Trust RSA Root-02 O=CommScope -# Label: "CommScope Public Trust RSA Root-02" -# Serial: 480062499834624527752716769107743131258796508494 -# MD5 Fingerprint: e1:29:f9:62:7b:76:e2:96:6d:f3:d4:d7:0f:ae:1f:aa -# SHA1 Fingerprint: ea:b0:e2:52:1b:89:93:4c:11:68:f2:d8:9a:ac:22:4c:a3:8a:57:ae -# SHA256 Fingerprint: ff:e9:43:d7:93:42:4b:4f:7c:44:0c:1c:3d:64:8d:53:63:f3:4b:82:dc:87:aa:7a:9f:11:8f:c5:de:e1:01:f1 ------BEGIN CERTIFICATE----- -MIIFbDCCA1SgAwIBAgIUVBa/O345lXGN0aoApYYNK496BU4wDQYJKoZIhvcNAQEL -BQAwTjELMAkGA1UEBhMCVVMxEjAQBgNVBAoMCUNvbW1TY29wZTErMCkGA1UEAwwi -Q29tbVNjb3BlIFB1YmxpYyBUcnVzdCBSU0EgUm9vdC0wMjAeFw0yMTA0MjgxNzE2 
-NDNaFw00NjA0MjgxNzE2NDJaME4xCzAJBgNVBAYTAlVTMRIwEAYDVQQKDAlDb21t -U2NvcGUxKzApBgNVBAMMIkNvbW1TY29wZSBQdWJsaWMgVHJ1c3QgUlNBIFJvb3Qt -MDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDh+g77aAASyE3VrCLE -NQE7xVTlWXZjpX/rwcRqmL0yjReA61260WI9JSMZNRTpf4mnG2I81lDnNJUDMrG0 -kyI9p+Kx7eZ7Ti6Hmw0zdQreqjXnfuU2mKKuJZ6VszKWpCtYHu8//mI0SFHRtI1C -rWDaSWqVcN3SAOLMV2MCe5bdSZdbkk6V0/nLKR8YSvgBKtJjCW4k6YnS5cciTNxz -hkcAqg2Ijq6FfUrpuzNPDlJwnZXjfG2WWy09X6GDRl224yW4fKcZgBzqZUPckXk2 -LHR88mcGyYnJ27/aaL8j7dxrrSiDeS/sOKUNNwFnJ5rpM9kzXzehxfCrPfp4sOcs -n/Y+n2Dg70jpkEUeBVF4GiwSLFworA2iI540jwXmojPOEXcT1A6kHkIfhs1w/tku -FT0du7jyU1fbzMZ0KZwYszZ1OC4PVKH4kh+Jlk+71O6d6Ts2QrUKOyrUZHk2EOH5 -kQMreyBUzQ0ZGshBMjTRsJnhkB4BQDa1t/qp5Xd1pCKBXbCL5CcSD1SIxtuFdOa3 -wNemKfrb3vOTlycEVS8KbzfFPROvCgCpLIscgSjX74Yxqa7ybrjKaixUR9gqiC6v -wQcQeKwRoi9C8DfF8rhW3Q5iLc4tVn5V8qdE9isy9COoR+jUKgF4z2rDN6ieZdIs -5fq6M8EGRPbmz6UNp2YINIos8wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G -A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUR9DnsSL/nSz12Vdgs7GxcJXvYXowDQYJ -KoZIhvcNAQELBQADggIBAIZpsU0v6Z9PIpNojuQhmaPORVMbc0RTAIFhzTHjCLqB -KCh6krm2qMhDnscTJk3C2OVVnJJdUNjCK9v+5qiXz1I6JMNlZFxHMaNlNRPDk7n3 -+VGXu6TwYofF1gbTl4MgqX67tiHCpQ2EAOHyJxCDut0DgdXdaMNmEMjRdrSzbyme -APnCKfWxkxlSaRosTKCL4BWaMS/TiJVZbuXEs1DIFAhKm4sTg7GkcrI7djNB3Nyq -pgdvHSQSn8h2vS/ZjvQs7rfSOBAkNlEv41xdgSGn2rtO/+YHqP65DSdsu3BaVXoT -6fEqSWnHX4dXTEN5bTpl6TBcQe7rd6VzEojov32u5cSoHw2OHG1QAk8mGEPej1WF -sQs3BWDJVTkSBKEqz3EWnzZRSb9wO55nnPt7eck5HHisd5FUmrh1CoFSl+NmYWvt -PjgelmFV4ZFUjO2MJB+ByRCac5krFk5yAD9UG/iNuovnFNa2RU9g7Jauwy8CTl2d -lklyALKrdVwPaFsdZcJfMw8eD/A7hvWwTruc9+olBdytoptLFwG+Qt81IR2tq670 -v64fG9PiO/yzcnMcmyiQiRM9HcEARwmWmjgb3bHPDcK0RPOWlc4yOo80nOAXx17O -rg3bhzjlP1v9mxnhMUF6cKojawHhRUzNlM47ni3niAIi9G7oyOzWPPO5std3eqx7 ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS ECC Root 2020 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS ECC Root 2020" -# Serial: 72082518505882327255703894282316633856 -# MD5 Fingerprint: c1:ab:fe:6a:10:2c:03:8d:bc:1c:22:32:c0:85:a7:fd -# SHA1 Fingerprint: c0:f8:96:c5:a9:3b:01:06:21:07:da:18:42:48:bc:e9:9d:88:d5:ec -# SHA256 Fingerprint: 57:8a:f4:de:d0:85:3f:4e:59:98:db:4a:ea:f9:cb:ea:8d:94:5f:60:b6:20:a3:8d:1a:3c:13:b2:bc:7b:a8:e1 ------BEGIN CERTIFICATE----- -MIICQjCCAcmgAwIBAgIQNjqWjMlcsljN0AFdxeVXADAKBggqhkjOPQQDAzBjMQsw -CQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0eSBH -bWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBFQ0MgUm9vdCAyMDIw -MB4XDTIwMDgyNTA3NDgyMFoXDTQ1MDgyNTIzNTk1OVowYzELMAkGA1UEBhMCREUx -JzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkGA1UE -AwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgRUNDIFJvb3QgMjAyMDB2MBAGByqGSM49 -AgEGBSuBBAAiA2IABM6//leov9Wq9xCazbzREaK9Z0LMkOsVGJDZos0MKiXrPk/O -tdKPD/M12kOLAoC+b1EkHQ9rK8qfwm9QMuU3ILYg/4gND21Ju9sGpIeQkpT0CdDP -f8iAC8GXs7s1J8nCG6NCMEAwHQYDVR0OBBYEFONyzG6VmUex5rNhTNHLq+O6zd6f -MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMAoGCCqGSM49BAMDA2cA -MGQCMHVSi7ekEE+uShCLsoRbQuHmKjYC2qBuGT8lv9pZMo7k+5Dck2TOrbRBR2Di -z6fLHgIwN0GMZt9Ba9aDAEH9L1r3ULRn0SyocddDypwnJJGDSA3PzfdUga/sf+Rn -27iQ7t0l ------END CERTIFICATE----- - -# Issuer: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Subject: CN=Telekom Security TLS RSA Root 2023 O=Deutsche Telekom Security GmbH -# Label: "Telekom Security TLS RSA Root 2023" -# Serial: 44676229530606711399881795178081572759 -# MD5 Fingerprint: bf:5b:eb:54:40:cd:48:71:c4:20:8d:7d:de:0a:42:f2 -# SHA1 Fingerprint: 54:d3:ac:b3:bd:57:56:f6:85:9d:ce:e5:c3:21:e2:d4:ad:83:d0:93 -# 
SHA256 Fingerprint: ef:c6:5c:ad:bb:59:ad:b6:ef:e8:4d:a2:23:11:b3:56:24:b7:1b:3b:1e:a0:da:8b:66:55:17:4e:c8:97:86:46 ------BEGIN CERTIFICATE----- -MIIFszCCA5ugAwIBAgIQIZxULej27HF3+k7ow3BXlzANBgkqhkiG9w0BAQwFADBj -MQswCQYDVQQGEwJERTEnMCUGA1UECgweRGV1dHNjaGUgVGVsZWtvbSBTZWN1cml0 -eSBHbWJIMSswKQYDVQQDDCJUZWxla29tIFNlY3VyaXR5IFRMUyBSU0EgUm9vdCAy -MDIzMB4XDTIzMDMyODEyMTY0NVoXDTQ4MDMyNzIzNTk1OVowYzELMAkGA1UEBhMC -REUxJzAlBgNVBAoMHkRldXRzY2hlIFRlbGVrb20gU2VjdXJpdHkgR21iSDErMCkG -A1UEAwwiVGVsZWtvbSBTZWN1cml0eSBUTFMgUlNBIFJvb3QgMjAyMzCCAiIwDQYJ -KoZIhvcNAQEBBQADggIPADCCAgoCggIBAO01oYGA88tKaVvC+1GDrib94W7zgRJ9 -cUD/h3VCKSHtgVIs3xLBGYSJwb3FKNXVS2xE1kzbB5ZKVXrKNoIENqil/Cf2SfHV -cp6R+SPWcHu79ZvB7JPPGeplfohwoHP89v+1VmLhc2o0mD6CuKyVU/QBoCcHcqMA -U6DksquDOFczJZSfvkgdmOGjup5czQRxUX11eKvzWarE4GC+j4NSuHUaQTXtvPM6 -Y+mpFEXX5lLRbtLevOP1Czvm4MS9Q2QTps70mDdsipWol8hHD/BeEIvnHRz+sTug -BTNoBUGCwQMrAcjnj02r6LX2zWtEtefdi+zqJbQAIldNsLGyMcEWzv/9FIS3R/qy -8XDe24tsNlikfLMR0cN3f1+2JeANxdKz+bi4d9s3cXFH42AYTyS2dTd4uaNir73J -co4vzLuu2+QVUhkHM/tqty1LkCiCc/4YizWN26cEar7qwU02OxY2kTLvtkCJkUPg -8qKrBC7m8kwOFjQgrIfBLX7JZkcXFBGk8/ehJImr2BrIoVyxo/eMbcgByU/J7MT8 -rFEz0ciD0cmfHdRHNCk+y7AO+oMLKFjlKdw/fKifybYKu6boRhYPluV75Gp6SG12 -mAWl3G0eQh5C2hrgUve1g8Aae3g1LDj1H/1Joy7SWWO/gLCMk3PLNaaZlSJhZQNg -+y+TS/qanIA7AgMBAAGjYzBhMA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUtqeX -gj10hZv3PJ+TmpV5dVKMbUcwDwYDVR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBS2 -p5eCPXSFm/c8n5OalXl1UoxtRzANBgkqhkiG9w0BAQwFAAOCAgEAqMxhpr51nhVQ -pGv7qHBFfLp+sVr8WyP6Cnf4mHGCDG3gXkaqk/QeoMPhk9tLrbKmXauw1GLLXrtm -9S3ul0A8Yute1hTWjOKWi0FpkzXmuZlrYrShF2Y0pmtjxrlO8iLpWA1WQdH6DErw -M807u20hOq6OcrXDSvvpfeWxm4bu4uB9tPcy/SKE8YXJN3nptT+/XOR0so8RYgDd -GGah2XsjX/GO1WfoVNpbOms2b/mBsTNHM3dA+VKq3dSDz4V4mZqTuXNnQkYRIer+ -CqkbGmVps4+uFrb2S1ayLfmlyOw7YqPta9BO1UAJpB+Y1zqlklkg5LB9zVtzaL1t -xKITDmcZuI1CfmwMmm6gJC3VRRvcxAIU/oVbZZfKTpBQCHpCNfnqwmbU+AGuHrS+ -w6jv/naaoqYfRvaE7fzbzsQCzndILIyy7MMAo+wsVRjBfhnu4S/yrYObnqsZ38aK -L4x35bcF7DvB7L6Gs4a8wPfc5+pbrrLMtTWGS9DiP7bY+A4A7l3j941Y/8+LN+lj -X273CXE2whJdV/LItM3z7gLfEdxquVeEHVlNjM7IDiPCtyaaEBRx/pOyiriA8A4Q -ntOoUAw3gi/q4Iqd4Sw5/7W0cwDk90imc6y/st53BIe0o82bNSQ3+pCTE4FCxpgm -dTdmQRCsu/WU48IxK63nI1bMNSWSs1A= ------END CERTIFICATE----- - -# Issuer: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Subject: CN=FIRMAPROFESIONAL CA ROOT-A WEB O=Firmaprofesional SA -# Label: "FIRMAPROFESIONAL CA ROOT-A WEB" -# Serial: 65916896770016886708751106294915943533 -# MD5 Fingerprint: 82:b2:ad:45:00:82:b0:66:63:f8:5f:c3:67:4e:ce:a3 -# SHA1 Fingerprint: a8:31:11:74:a6:14:15:0d:ca:77:dd:0e:e4:0c:5d:58:fc:a0:72:a5 -# SHA256 Fingerprint: be:f2:56:da:f2:6e:9c:69:bd:ec:16:02:35:97:98:f3:ca:f7:18:21:a0:3e:01:82:57:c5:3c:65:61:7f:3d:4a ------BEGIN CERTIFICATE----- -MIICejCCAgCgAwIBAgIQMZch7a+JQn81QYehZ1ZMbTAKBggqhkjOPQQDAzBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwHhcNMjIwNDA2MDkwMTM2WhcNNDcwMzMxMDkwMTM2WjBuMQsw -CQYDVQQGEwJFUzEcMBoGA1UECgwTRmlybWFwcm9mZXNpb25hbCBTQTEYMBYGA1UE -YQwPVkFURVMtQTYyNjM0MDY4MScwJQYDVQQDDB5GSVJNQVBST0ZFU0lPTkFMIENB -IFJPT1QtQSBXRUIwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAARHU+osEaR3xyrq89Zf -e9MEkVz6iMYiuYMQYneEMy3pA4jU4DP37XcsSmDq5G+tbbT4TIqk5B/K6k84Si6C -cyvHZpsKjECcfIr28jlgst7L7Ljkb+qbXbdTkBgyVcUgt5SjYzBhMA8GA1UdEwEB -/wQFMAMBAf8wHwYDVR0jBBgwFoAUk+FDY1w8ndYn81LsF7Kpryz3dvgwHQYDVR0O -BBYEFJPhQ2NcPJ3WJ/NS7Beyqa8s93b4MA4GA1UdDwEB/wQEAwIBBjAKBggqhkjO -PQQDAwNoADBlAjAdfKR7w4l1M+E7qUW/Runpod3JIha3RxEL2Jq68cgLcFBTApFw 
-hVmpHqTm6iMxoAACMQD94vizrxa5HnPEluPBMBnYfubDl94cT7iJLzPrSA8Z94dG -XSaQpYXFuXqUPoeovQA= ------END CERTIFICATE----- - -# Issuer: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Subject: CN=TWCA CYBER Root CA O=TAIWAN-CA OU=Root CA -# Label: "TWCA CYBER Root CA" -# Serial: 85076849864375384482682434040119489222 -# MD5 Fingerprint: 0b:33:a0:97:52:95:d4:a9:fd:bb:db:6e:a3:55:5b:51 -# SHA1 Fingerprint: f6:b1:1c:1a:83:38:e9:7b:db:b3:a8:c8:33:24:e0:2d:9c:7f:26:66 -# SHA256 Fingerprint: 3f:63:bb:28:14:be:17:4e:c8:b6:43:9c:f0:8d:6d:56:f0:b7:c4:05:88:3a:56:48:a3:34:42:4d:6b:3e:c5:58 ------BEGIN CERTIFICATE----- -MIIFjTCCA3WgAwIBAgIQQAE0jMIAAAAAAAAAATzyxjANBgkqhkiG9w0BAQwFADBQ -MQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FOLUNBMRAwDgYDVQQLEwdSb290 -IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3QgQ0EwHhcNMjIxMTIyMDY1NDI5 -WhcNNDcxMTIyMTU1OTU5WjBQMQswCQYDVQQGEwJUVzESMBAGA1UEChMJVEFJV0FO -LUNBMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJUV0NBIENZQkVSIFJvb3Qg -Q0EwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDG+Moe2Qkgfh1sTs6P -40czRJzHyWmqOlt47nDSkvgEs1JSHWdyKKHfi12VCv7qze33Kc7wb3+szT3vsxxF -avcokPFhV8UMxKNQXd7UtcsZyoC5dc4pztKFIuwCY8xEMCDa6pFbVuYdHNWdZsc/ -34bKS1PE2Y2yHer43CdTo0fhYcx9tbD47nORxc5zb87uEB8aBs/pJ2DFTxnk684i -JkXXYJndzk834H/nY62wuFm40AZoNWDTNq5xQwTxaWV4fPMf88oon1oglWa0zbfu -j3ikRRjpJi+NmykosaS3Om251Bw4ckVYsV7r8Cibt4LK/c/WMw+f+5eesRycnupf -Xtuq3VTpMCEobY5583WSjCb+3MX2w7DfRFlDo7YDKPYIMKoNM+HvnKkHIuNZW0CP -2oi3aQiotyMuRAlZN1vH4xfyIutuOVLF3lSnmMlLIJXcRolftBL5hSmO68gnFSDA -S9TMfAxsNAwmmyYxpjyn9tnQS6Jk/zuZQXLB4HCX8SS7K8R0IrGsayIyJNN4KsDA -oS/xUgXJP+92ZuJF2A09rZXIx4kmyA+upwMu+8Ff+iDhcK2wZSA3M2Cw1a/XDBzC -kHDXShi8fgGwsOsVHkQGzaRP6AzRwyAQ4VRlnrZR0Bp2a0JaWHY06rc3Ga4udfmW -5cFZ95RXKSWNOkyrTZpB0F8mAwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYD -VR0TAQH/BAUwAwEB/zAfBgNVHSMEGDAWgBSdhWEUfMFib5do5E83QOGt4A1WNzAd -BgNVHQ4EFgQUnYVhFHzBYm+XaORPN0DhreANVjcwDQYJKoZIhvcNAQEMBQADggIB -AGSPesRiDrWIzLjHhg6hShbNcAu3p4ULs3a2D6f/CIsLJc+o1IN1KriWiLb73y0t -tGlTITVX1olNc79pj3CjYcya2x6a4CD4bLubIp1dhDGaLIrdaqHXKGnK/nZVekZn -68xDiBaiA9a5F/gZbG0jAn/xX9AKKSM70aoK7akXJlQKTcKlTfjF/biBzysseKNn -TKkHmvPfXvt89YnNdJdhEGoHK4Fa0o635yDRIG4kqIQnoVesqlVYL9zZyvpoBJ7t -RCT5dEA7IzOrg1oYJkK2bVS1FmAwbLGg+LhBoF1JSdJlBTrq/p1hvIbZv97Tujqx -f36SNI7JAG7cmL3c7IAFrQI932XtCwP39xaEBDG6k5TY8hL4iuO/Qq+n1M0RFxbI -Qh0UqEL20kCGoE8jypZFVmAGzbdVAaYBlGX+bgUJurSkquLvWL69J1bY73NxW0Qz -8ppy6rBePm6pUlvscG21h483XjyMnM7k8M4MZ0HMzvaAq07MTFb1wWFZk7Q+ptq4 -NxKfKjLji7gh7MMrZQzvIt6IKTtM1/r+t+FHvpw+PoP7UV31aPcuIYXcv/Fa4nzX -xeSDwWrruoBa3lwtcHb4yOWHh8qgnaHlIhInD0Q9HWzq1MKLL295q39QpsQZp6F6 -t5b5wR9iWqJDB0BeJsas7a5wFsWqynKKTbDPAYsDP27X ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA12 O=Cybertrust Japan Co., Ltd. 
-# Label: "SecureSign Root CA12" -# Serial: 587887345431707215246142177076162061960426065942 -# MD5 Fingerprint: c6:89:ca:64:42:9b:62:08:49:0b:1e:7f:e9:07:3d:e8 -# SHA1 Fingerprint: 7a:22:1e:3d:de:1b:06:ac:9e:c8:47:70:16:8e:3c:e5:f7:6b:06:f4 -# SHA256 Fingerprint: 3f:03:4b:b5:70:4d:44:b2:d0:85:45:a0:20:57:de:93:eb:f3:90:5f:ce:72:1a:cb:c7:30:c0:6d:da:ee:90:4e ------BEGIN CERTIFICATE----- -MIIDcjCCAlqgAwIBAgIUZvnHwa/swlG07VOX5uaCwysckBYwDQYJKoZIhvcNAQEL -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExMjAeFw0yMDA0MDgw -NTM2NDZaFw00MDA0MDgwNTM2NDZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6OcE3emhF -KxS06+QT61d1I02PJC0W6K6OyX2kVzsqdiUzg2zqMoqUm048luT9Ub+ZyZN+v/mt -p7JIKwccJ/VMvHASd6SFVLX9kHrko+RRWAPNEHl57muTH2SOa2SroxPjcf59q5zd -J1M3s6oYwlkm7Fsf0uZlfO+TvdhYXAvA42VvPMfKWeP+bl+sg779XSVOKik71gur -FzJ4pOE+lEa+Ym6b3kaosRbnhW70CEBFEaCeVESE99g2zvVQR9wsMJvuwPWW0v4J -hscGWa5Pro4RmHvzC1KqYiaqId+OJTN5lxZJjfU+1UefNzFJM3IFTQy2VYzxV4+K -h9GtxRESOaCtAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQD -AgEGMB0GA1UdDgQWBBRXNPN0zwRL1SXm8UC2LEzZLemgrTANBgkqhkiG9w0BAQsF -AAOCAQEAPrvbFxbS8hQBICw4g0utvsqFepq2m2um4fylOqyttCg6r9cBg0krY6Ld -mmQOmFxv3Y67ilQiLUoT865AQ9tPkbeGGuwAtEGBpE/6aouIs3YIcipJQMPTw4WJ -mBClnW8Zt7vPemVV2zfrPIpyMpcemik+rY3moxtt9XUa5rBouVui7mlHJzWhhpmA -8zNL4WukJsPvdFlseqJkth5Ew1DgDzk9qTPxpfPSvWKErI4cqc1avTc7bgoitPQV -55FYxTpE05Uo2cBl6XLK0A+9H7MV2anjpEcJnuDLN/v9vZfVvhgaaaI5gdka9at/ -yOPiZwud9AzqVN/Ssq+xIvEg37xEHA== ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA14 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA14" -# Serial: 575790784512929437950770173562378038616896959179 -# MD5 Fingerprint: 71:0d:72:fa:92:19:65:5e:89:04:ac:16:33:f0:bc:d5 -# SHA1 Fingerprint: dd:50:c0:f7:79:b3:64:2e:74:a2:b8:9d:9f:d3:40:dd:bb:f0:f2:4f -# SHA256 Fingerprint: 4b:00:9c:10:34:49:4f:9a:b5:6b:ba:3b:a1:d6:27:31:fc:4d:20:d8:95:5a:dc:ec:10:a9:25:60:72:61:e3:38 ------BEGIN CERTIFICATE----- -MIIFcjCCA1qgAwIBAgIUZNtaDCBO6Ncpd8hQJ6JaJ90t8sswDQYJKoZIhvcNAQEM -BQAwUTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28u -LCBMdGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNDAeFw0yMDA0MDgw -NzA2MTlaFw00NTA0MDgwNzA2MTlaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpD -eWJlcnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBS -b290IENBMTQwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQDF0nqh1oq/ -FjHQmNE6lPxauG4iwWL3pwon71D2LrGeaBLwbCRjOfHw3xDG3rdSINVSW0KZnvOg -vlIfX8xnbacuUKLBl422+JX1sLrcneC+y9/3OPJH9aaakpUqYllQC6KxNedlsmGy -6pJxaeQp8E+BgQQ8sqVb1MWoWWd7VRxJq3qdwudzTe/NCcLEVxLbAQ4jeQkHO6Lo -/IrPj8BGJJw4J+CDnRugv3gVEOuGTgpa/d/aLIJ+7sr2KeH6caH3iGicnPCNvg9J -kdjqOvn90Ghx2+m1K06Ckm9mH+Dw3EzsytHqunQG+bOEkJTRX45zGRBdAuVwpcAQ -0BB8b8VYSbSwbprafZX1zNoCr7gsfXmPvkPx+SgojQlD+Ajda8iLLCSxjVIHvXib -y8posqTdDEx5YMaZ0ZPxMBoH064iwurO8YQJzOAUbn8/ftKChazcqRZOhaBgy/ac -18izju3Gm5h1DVXoX+WViwKkrkMpKBGk5hIwAUt1ax5mnXkvpXYvHUC0bcl9eQjs -0Wq2XSqypWa9a4X0dFbD9ed1Uigspf9mR6XU/v6eVL9lfgHWMI+lNpyiUBzuOIAB -SMbHdPTGrMNASRZhdCyvjG817XsYAFs2PJxQDcqSMxDxJklt33UkN4Ii1+iW/RVL -ApY+B3KVfqs9TC7XyvDf4Fg/LS8EmjijAQIDAQABo0IwQDAPBgNVHRMBAf8EBTAD -AQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUBpOjCl4oaTeqYR3r6/wtbyPk -86AwDQYJKoZIhvcNAQEMBQADggIBAJaAcgkGfpzMkwQWu6A6jZJOtxEaCnFxEM0E -rX+lRVAQZk5KQaID2RFPeje5S+LGjzJmdSX7684/AykmjbgWHfYfM25I5uj4V7Ib -ed87hwriZLoAymzvftAj63iP/2SbNDefNWWipAA9EiOWWF3KY4fGoweITedpdopT 
-zfFP7ELyk+OZpDc8h7hi2/DsHzc/N19DzFGdtfCXwreFamgLRB7lUe6TzktuhsHS -DCRZNhqfLJGP4xjblJUK7ZGqDpncllPjYYPGFrojutzdfhrGe0K22VoF3Jpf1d+4 -2kd92jjbrDnVHmtsKheMYc2xbXIBw8MgAGJoFjHVdqqGuw6qnsb58Nn4DSEC5MUo -FlkRudlpcyqSeLiSV5sI8jrlL5WwWLdrIBRtFO8KvH7YVdiI2i/6GaX7i+B/OfVy -K4XELKzvGUWSTLNhB9xNH27SgRNcmvMSZ4PPmz+Ln52kuaiWA3rF7iDeM9ovnhp6 -dB7h7sxaOgTdsxoEqBRjrLdHEoOabPXm6RUVkRqEGQ6UROcSjiVbgGcZ3GOTEAtl -Lor6CZpO2oYofaphNdgOpygau1LgePhsumywbrmHXumZNTfxPWQrqaA0k89jL9WB -365jJ6UeTo3cKXhZ+PmhIIynJkBugnLNeLLIjzwec+fBH7/PzqUqm9tEZDKgu39c -JRNItX+S ------END CERTIFICATE----- - -# Issuer: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Subject: CN=SecureSign Root CA15 O=Cybertrust Japan Co., Ltd. -# Label: "SecureSign Root CA15" -# Serial: 126083514594751269499665114766174399806381178503 -# MD5 Fingerprint: 13:30:fc:c4:62:a6:a9:de:b5:c1:68:af:b5:d2:31:47 -# SHA1 Fingerprint: cb:ba:83:c8:c1:5a:5d:f1:f9:73:6f:ca:d7:ef:28:13:06:4a:07:7d -# SHA256 Fingerprint: e7:78:f0:f0:95:fe:84:37:29:cd:1a:00:82:17:9e:53:14:a9:c2:91:44:28:05:e1:fb:1d:8f:b6:b8:88:6c:3a ------BEGIN CERTIFICATE----- -MIICIzCCAamgAwIBAgIUFhXHw9hJp75pDIqI7fBw+d23PocwCgYIKoZIzj0EAwMw -UTELMAkGA1UEBhMCSlAxIzAhBgNVBAoTGkN5YmVydHJ1c3QgSmFwYW4gQ28uLCBM -dGQuMR0wGwYDVQQDExRTZWN1cmVTaWduIFJvb3QgQ0ExNTAeFw0yMDA0MDgwODMy -NTZaFw00NTA0MDgwODMyNTZaMFExCzAJBgNVBAYTAkpQMSMwIQYDVQQKExpDeWJl -cnRydXN0IEphcGFuIENvLiwgTHRkLjEdMBsGA1UEAxMUU2VjdXJlU2lnbiBSb290 -IENBMTUwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQLUHSNZDKZmbPSYAi4Io5GdCx4 -wCtELW1fHcmuS1Iggz24FG1Th2CeX2yF2wYUleDHKP+dX+Sq8bOLbe1PL0vJSpSR -ZHX+AezB2Ot6lHhWGENfa4HL9rzatAy2KZMIaY+jQjBAMA8GA1UdEwEB/wQFMAMB -Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTrQciu/NWeUUj1vYv0hyCTQSvT -9DAKBggqhkjOPQQDAwNoADBlAjEA2S6Jfl5OpBEHvVnCB96rMjhTKkZEBhd6zlHp -4P9mLQlO4E/0BdGF9jVg3PVys0Z9AjBEmEYagoUeYWmJSwdLZrWeqrqgHkHZAXQ6 -bkU6iYAZezKYVWOr62Nuk22rGwlgMU4= ------END CERTIFICATE----- diff --git a/myenv/Lib/site-packages/certifi/core.py b/myenv/Lib/site-packages/certifi/core.py deleted file mode 100644 index 91f538b..0000000 --- a/myenv/Lib/site-packages/certifi/core.py +++ /dev/null @@ -1,114 +0,0 @@ -""" -certifi.py -~~~~~~~~~~ - -This module returns the installation location of cacert.pem or its contents. -""" -import sys -import atexit - -def exit_cacert_ctx() -> None: - _CACERT_CTX.__exit__(None, None, None) # type: ignore[union-attr] - - -if sys.version_info >= (3, 11): - - from importlib.resources import as_file, files - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the file - # in cases where we're inside of a zipimport situation until someone - # actually calls where(), but we don't want to re-extract the file - # on every call of where(), so we'll do it once then store it in a - # global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you to - # manage the cleanup of this file, so it doesn't actually return a - # path, it returns a context manager that will give you the path - # when you enter it and will do any cleanup when you leave it. In - # the common case of not needing a temporary file, it will just - # return the file system location and the __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. 
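The comment above describes certifi's lazy-extraction scheme: where() enters the as_file() context once, caches the resulting path in a module-level global, and registers cleanup with atexit. A minimal consumer-side sketch of that public API, assuming only that certifi is installed and using the stdlib ssl module:

    import ssl

    import certifi

    # where() returns a filesystem path to cacert.pem; under zipimport the
    # bundle is extracted once and the path is cached for the process lifetime.
    ctx = ssl.create_default_context(cafile=certifi.where())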
- _CACERT_CTX = as_file(files("certifi").joinpath("cacert.pem")) - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return files("certifi").joinpath("cacert.pem").read_text(encoding="ascii") - -elif sys.version_info >= (3, 7): - - from importlib.resources import path as get_path, read_text - - _CACERT_CTX = None - _CACERT_PATH = None - - def where() -> str: - # This is slightly terrible, but we want to delay extracting the - # file in cases where we're inside of a zipimport situation until - # someone actually calls where(), but we don't want to re-extract - # the file on every call of where(), so we'll do it once then store - # it in a global variable. - global _CACERT_CTX - global _CACERT_PATH - if _CACERT_PATH is None: - # This is slightly janky, the importlib.resources API wants you - # to manage the cleanup of this file, so it doesn't actually - # return a path, it returns a context manager that will give - # you the path when you enter it and will do any cleanup when - # you leave it. In the common case of not needing a temporary - # file, it will just return the file system location and the - # __exit__() is a no-op. - # - # We also have to hold onto the actual context manager, because - # it will do the cleanup whenever it gets garbage collected, so - # we will also store that at the global level as well. - _CACERT_CTX = get_path("certifi", "cacert.pem") - _CACERT_PATH = str(_CACERT_CTX.__enter__()) - atexit.register(exit_cacert_ctx) - - return _CACERT_PATH - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") - -else: - import os - import types - from typing import Union - - Package = Union[types.ModuleType, str] - Resource = Union[str, "os.PathLike"] - - # This fallback will work for Python versions prior to 3.7 that lack the - # importlib.resources module but relies on the existing `where` function - # so won't address issues with environments like PyOxidizer that don't set - # __file__ on modules. - def read_text( - package: Package, - resource: Resource, - encoding: str = 'utf-8', - errors: str = 'strict' - ) -> str: - with open(where(), encoding=encoding) as data: - return data.read() - - # If we don't have importlib.resources, then we will just do the old logic - # of assuming we're on the filesystem and munge the path directly. - def where() -> str: - f = os.path.dirname(__file__) - - return os.path.join(f, "cacert.pem") - - def contents() -> str: - return read_text("certifi", "cacert.pem", encoding="ascii") diff --git a/myenv/Lib/site-packages/certifi/py.typed b/myenv/Lib/site-packages/certifi/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/click-8.1.7.dist-info/INSTALLER b/myenv/Lib/site-packages/click-8.1.7.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/click-8.1.7.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/click-8.1.7.dist-info/LICENSE.rst b/myenv/Lib/site-packages/click-8.1.7.dist-info/LICENSE.rst deleted file mode 100644 index d12a849..0000000 --- a/myenv/Lib/site-packages/click-8.1.7.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2014 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. 
Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/Lib/site-packages/click-8.1.7.dist-info/METADATA b/myenv/Lib/site-packages/click-8.1.7.dist-info/METADATA deleted file mode 100644 index 7a6bbb2..0000000 --- a/myenv/Lib/site-packages/click-8.1.7.dist-info/METADATA +++ /dev/null @@ -1,103 +0,0 @@ -Metadata-Version: 2.1 -Name: click -Version: 8.1.7 -Summary: Composable command line interface toolkit -Home-page: https://palletsprojects.com/p/click/ -Maintainer: Pallets -Maintainer-email: contact@palletsprojects.com -License: BSD-3-Clause -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Documentation, https://click.palletsprojects.com/ -Project-URL: Changes, https://click.palletsprojects.com/changes/ -Project-URL: Source Code, https://github.com/pallets/click/ -Project-URL: Issue Tracker, https://github.com/pallets/click/issues/ -Project-URL: Chat, https://discord.gg/pallets -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE.rst -Requires-Dist: colorama ; platform_system == "Windows" -Requires-Dist: importlib-metadata ; python_version < "3.8" - -\$ click\_ -========== - -Click is a Python package for creating beautiful command line interfaces -in a composable way with as little code as necessary. It's the "Command -Line Interface Creation Kit". It's highly configurable but comes with -sensible defaults out of the box. - -It aims to make the process of writing command line tools quick and fun -while also preventing any frustration caused by the inability to -implement an intended CLI API. - -Click in three points: - -- Arbitrary nesting of commands -- Automatic help page generation -- Supports lazy loading of subcommands at runtime - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U click - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. 
code-block:: python - - import click - - @click.command() - @click.option("--count", default=1, help="Number of greetings.") - @click.option("--name", prompt="Your name", help="The person to greet.") - def hello(count, name): - """Simple program that greets NAME for a total of COUNT times.""" - for _ in range(count): - click.echo(f"Hello, {name}!") - - if __name__ == '__main__': - hello() - -.. code-block:: text - - $ python hello.py --count=3 - Your name: Click - Hello, Click! - Hello, Click! - Hello, Click! - - -Donate ------- - -The Pallets organization develops and supports Click and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, `please -donate today`_. - -.. _please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://click.palletsprojects.com/ -- Changes: https://click.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/click/ -- Source Code: https://github.com/pallets/click -- Issue Tracker: https://github.com/pallets/click/issues -- Chat: https://discord.gg/pallets diff --git a/myenv/Lib/site-packages/click-8.1.7.dist-info/RECORD b/myenv/Lib/site-packages/click-8.1.7.dist-info/RECORD deleted file mode 100644 index 95ff8c6..0000000 --- a/myenv/Lib/site-packages/click-8.1.7.dist-info/RECORD +++ /dev/null @@ -1,39 +0,0 @@ -click-8.1.7.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -click-8.1.7.dist-info/LICENSE.rst,sha256=morRBqOU6FO_4h9C9OctWSgZoigF2ZG18ydQKSkrZY0,1475 -click-8.1.7.dist-info/METADATA,sha256=qIMevCxGA9yEmJOM_4WHuUJCwWpsIEVbCPOhs45YPN4,3014 -click-8.1.7.dist-info/RECORD,, -click-8.1.7.dist-info/WHEEL,sha256=5sUXSg9e4bi7lTLOHcm6QEYwO5TIF1TNbTSVFVjcJcc,92 -click-8.1.7.dist-info/top_level.txt,sha256=J1ZQogalYS4pphY_lPECoNMfw0HzTSrZglC4Yfwo4xA,6 -click/__init__.py,sha256=YDDbjm406dTOA0V8bTtdGnhN7zj5j-_dFRewZF_pLvw,3138 -click/__pycache__/__init__.cpython-312.pyc,, -click/__pycache__/_compat.cpython-312.pyc,, -click/__pycache__/_termui_impl.cpython-312.pyc,, -click/__pycache__/_textwrap.cpython-312.pyc,, -click/__pycache__/_winconsole.cpython-312.pyc,, -click/__pycache__/core.cpython-312.pyc,, -click/__pycache__/decorators.cpython-312.pyc,, -click/__pycache__/exceptions.cpython-312.pyc,, -click/__pycache__/formatting.cpython-312.pyc,, -click/__pycache__/globals.cpython-312.pyc,, -click/__pycache__/parser.cpython-312.pyc,, -click/__pycache__/shell_completion.cpython-312.pyc,, -click/__pycache__/termui.cpython-312.pyc,, -click/__pycache__/testing.cpython-312.pyc,, -click/__pycache__/types.cpython-312.pyc,, -click/__pycache__/utils.cpython-312.pyc,, -click/_compat.py,sha256=5318agQpbt4kroKsbqDOYpTSWzL_YCZVUQiTT04yXmc,18744 -click/_termui_impl.py,sha256=3dFYv4445Nw-rFvZOTBMBPYwB1bxnmNk9Du6Dm_oBSU,24069 -click/_textwrap.py,sha256=10fQ64OcBUMuK7mFvh8363_uoOxPlRItZBmKzRJDgoY,1353 -click/_winconsole.py,sha256=5ju3jQkcZD0W27WEMGqmEP4y_crUVzPCqsX_FYb7BO0,7860 -click/core.py,sha256=j6oEWtGgGna8JarD6WxhXmNnxLnfRjwXglbBc-8jr7U,114086 -click/decorators.py,sha256=-ZlbGYgV-oI8jr_oH4RpuL1PFS-5QmeuEAsLDAYgxtw,18719 -click/exceptions.py,sha256=fyROO-47HWFDjt2qupo7A3J32VlpM-ovJnfowu92K3s,9273 -click/formatting.py,sha256=Frf0-5W33-loyY_i9qrwXR8-STnW3m5gvyxLVUdyxyk,9706 -click/globals.py,sha256=TP-qM88STzc7f127h35TD_v920FgfOD2EwzqA0oE8XU,1961 -click/parser.py,sha256=LKyYQE9ZLj5KgIDXkrcTHQRXIggfoivX14_UVIn56YA,19067 -click/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
-click/shell_completion.py,sha256=Ty3VM_ts0sQhj6u7eFTiLwHPoTgcXTGEAUg2OpLqYKw,18460 -click/termui.py,sha256=H7Q8FpmPelhJ2ovOhfCRhjMtCpNyjFXryAMLZODqsdc,28324 -click/testing.py,sha256=1Qd4kS5bucn1hsNIRryd0WtTMuCpkA93grkWxT8POsU,16084 -click/types.py,sha256=TZvz3hKvBztf-Hpa2enOmP4eznSPLzijjig5b_0XMxE,36391 -click/utils.py,sha256=1476UduUNY6UePGU4m18uzVHLt1sKM2PP3yWsQhbItM,20298 diff --git a/myenv/Lib/site-packages/click-8.1.7.dist-info/WHEEL b/myenv/Lib/site-packages/click-8.1.7.dist-info/WHEEL deleted file mode 100644 index 2c08da0..0000000 --- a/myenv/Lib/site-packages/click-8.1.7.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.41.1) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/click-8.1.7.dist-info/top_level.txt b/myenv/Lib/site-packages/click-8.1.7.dist-info/top_level.txt deleted file mode 100644 index dca9a90..0000000 --- a/myenv/Lib/site-packages/click-8.1.7.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -click diff --git a/myenv/Lib/site-packages/click/__init__.py b/myenv/Lib/site-packages/click/__init__.py deleted file mode 100644 index 9a1dab0..0000000 --- a/myenv/Lib/site-packages/click/__init__.py +++ /dev/null @@ -1,73 +0,0 @@ -""" -Click is a simple Python module inspired by the stdlib optparse to make -writing command line scripts fun. Unlike other modules, it's based -around a simple API that does not come with too much magic and is -composable. -""" -from .core import Argument as Argument -from .core import BaseCommand as BaseCommand -from .core import Command as Command -from .core import CommandCollection as CommandCollection -from .core import Context as Context -from .core import Group as Group -from .core import MultiCommand as MultiCommand -from .core import Option as Option -from .core import Parameter as Parameter -from .decorators import argument as argument -from .decorators import command as command -from .decorators import confirmation_option as confirmation_option -from .decorators import group as group -from .decorators import help_option as help_option -from .decorators import make_pass_decorator as make_pass_decorator -from .decorators import option as option -from .decorators import pass_context as pass_context -from .decorators import pass_obj as pass_obj -from .decorators import password_option as password_option -from .decorators import version_option as version_option -from .exceptions import Abort as Abort -from .exceptions import BadArgumentUsage as BadArgumentUsage -from .exceptions import BadOptionUsage as BadOptionUsage -from .exceptions import BadParameter as BadParameter -from .exceptions import ClickException as ClickException -from .exceptions import FileError as FileError -from .exceptions import MissingParameter as MissingParameter -from .exceptions import NoSuchOption as NoSuchOption -from .exceptions import UsageError as UsageError -from .formatting import HelpFormatter as HelpFormatter -from .formatting import wrap_text as wrap_text -from .globals import get_current_context as get_current_context -from .parser import OptionParser as OptionParser -from .termui import clear as clear -from .termui import confirm as confirm -from .termui import echo_via_pager as echo_via_pager -from .termui import edit as edit -from .termui import getchar as getchar -from .termui import launch as launch -from .termui import pause as pause -from .termui import progressbar as progressbar -from .termui import prompt as prompt -from .termui import secho as secho -from .termui 
import style as style -from .termui import unstyle as unstyle -from .types import BOOL as BOOL -from .types import Choice as Choice -from .types import DateTime as DateTime -from .types import File as File -from .types import FLOAT as FLOAT -from .types import FloatRange as FloatRange -from .types import INT as INT -from .types import IntRange as IntRange -from .types import ParamType as ParamType -from .types import Path as Path -from .types import STRING as STRING -from .types import Tuple as Tuple -from .types import UNPROCESSED as UNPROCESSED -from .types import UUID as UUID -from .utils import echo as echo -from .utils import format_filename as format_filename -from .utils import get_app_dir as get_app_dir -from .utils import get_binary_stream as get_binary_stream -from .utils import get_text_stream as get_text_stream -from .utils import open_file as open_file - -__version__ = "8.1.7" diff --git a/myenv/Lib/site-packages/click/_compat.py b/myenv/Lib/site-packages/click/_compat.py deleted file mode 100644 index 23f8866..0000000 --- a/myenv/Lib/site-packages/click/_compat.py +++ /dev/null @@ -1,623 +0,0 @@ -import codecs -import io -import os -import re -import sys -import typing as t -from weakref import WeakKeyDictionary - -CYGWIN = sys.platform.startswith("cygwin") -WIN = sys.platform.startswith("win") -auto_wrap_for_ansi: t.Optional[t.Callable[[t.TextIO], t.TextIO]] = None -_ansi_re = re.compile(r"\033\[[;?0-9]*[a-zA-Z]") - - -def _make_text_stream( - stream: t.BinaryIO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if encoding is None: - encoding = get_best_encoding(stream) - if errors is None: - errors = "replace" - return _NonClosingTextIOWrapper( - stream, - encoding, - errors, - line_buffering=True, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def is_ascii_encoding(encoding: str) -> bool: - """Checks if a given encoding is ascii.""" - try: - return codecs.lookup(encoding).name == "ascii" - except LookupError: - return False - - -def get_best_encoding(stream: t.IO[t.Any]) -> str: - """Returns the default stream encoding if not found.""" - rv = getattr(stream, "encoding", None) or sys.getdefaultencoding() - if is_ascii_encoding(rv): - return "utf-8" - return rv - - -class _NonClosingTextIOWrapper(io.TextIOWrapper): - def __init__( - self, - stream: t.BinaryIO, - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, - force_writable: bool = False, - **extra: t.Any, - ) -> None: - self._stream = stream = t.cast( - t.BinaryIO, _FixupStream(stream, force_readable, force_writable) - ) - super().__init__(stream, encoding, errors, **extra) - - def __del__(self) -> None: - try: - self.detach() - except Exception: - pass - - def isatty(self) -> bool: - # https://bitbucket.org/pypy/pypy/issue/1803 - return self._stream.isatty() - - -class _FixupStream: - """The new io interface needs more from streams than streams - traditionally implement. As such, this fix-up code is necessary in - some circumstances. - - The forcing of readable and writable flags are there because some tools - put badly patched objects on sys (one such offender are certain version - of jupyter notebook). 
- """ - - def __init__( - self, - stream: t.BinaryIO, - force_readable: bool = False, - force_writable: bool = False, - ): - self._stream = stream - self._force_readable = force_readable - self._force_writable = force_writable - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._stream, name) - - def read1(self, size: int) -> bytes: - f = getattr(self._stream, "read1", None) - - if f is not None: - return t.cast(bytes, f(size)) - - return self._stream.read(size) - - def readable(self) -> bool: - if self._force_readable: - return True - x = getattr(self._stream, "readable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.read(0) - except Exception: - return False - return True - - def writable(self) -> bool: - if self._force_writable: - return True - x = getattr(self._stream, "writable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.write("") # type: ignore - except Exception: - try: - self._stream.write(b"") - except Exception: - return False - return True - - def seekable(self) -> bool: - x = getattr(self._stream, "seekable", None) - if x is not None: - return t.cast(bool, x()) - try: - self._stream.seek(self._stream.tell()) - except Exception: - return False - return True - - -def _is_binary_reader(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - return isinstance(stream.read(0), bytes) - except Exception: - return default - # This happens in some cases where the stream was already - # closed. In this case, we assume the default. - - -def _is_binary_writer(stream: t.IO[t.Any], default: bool = False) -> bool: - try: - stream.write(b"") - except Exception: - try: - stream.write("") - return False - except Exception: - pass - return default - return True - - -def _find_binary_reader(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_reader(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_reader(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _find_binary_writer(stream: t.IO[t.Any]) -> t.Optional[t.BinaryIO]: - # We need to figure out if the given stream is already binary. - # This can happen because the official docs recommend detaching - # the streams to get binary streams. Some code might do this, so - # we need to deal with this case explicitly. - if _is_binary_writer(stream, False): - return t.cast(t.BinaryIO, stream) - - buf = getattr(stream, "buffer", None) - - # Same situation here; this time we assume that the buffer is - # actually binary in case it's closed. - if buf is not None and _is_binary_writer(buf, True): - return t.cast(t.BinaryIO, buf) - - return None - - -def _stream_is_misconfigured(stream: t.TextIO) -> bool: - """A stream is misconfigured if its encoding is ASCII.""" - # If the stream does not have an encoding set, we assume it's set - # to ASCII. This appears to happen in certain unittest - # environments. It's not quite clear what the correct behavior is - # but this at least will force Click to recover somehow. 
- return is_ascii_encoding(getattr(stream, "encoding", None) or "ascii") - - -def _is_compat_stream_attr(stream: t.TextIO, attr: str, value: t.Optional[str]) -> bool: - """A stream attribute is compatible if it is equal to the - desired value or the desired value is unset and the attribute - has a value. - """ - stream_value = getattr(stream, attr, None) - return stream_value == value or (value is None and stream_value is not None) - - -def _is_compatible_text_stream( - stream: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> bool: - """Check if a stream's encoding and errors attributes are - compatible with the desired values. - """ - return _is_compat_stream_attr( - stream, "encoding", encoding - ) and _is_compat_stream_attr(stream, "errors", errors) - - -def _force_correct_text_stream( - text_stream: t.IO[t.Any], - encoding: t.Optional[str], - errors: t.Optional[str], - is_binary: t.Callable[[t.IO[t.Any], bool], bool], - find_binary: t.Callable[[t.IO[t.Any]], t.Optional[t.BinaryIO]], - force_readable: bool = False, - force_writable: bool = False, -) -> t.TextIO: - if is_binary(text_stream, False): - binary_reader = t.cast(t.BinaryIO, text_stream) - else: - text_stream = t.cast(t.TextIO, text_stream) - # If the stream looks compatible, and won't default to a - # misconfigured ascii encoding, return it as-is. - if _is_compatible_text_stream(text_stream, encoding, errors) and not ( - encoding is None and _stream_is_misconfigured(text_stream) - ): - return text_stream - - # Otherwise, get the underlying binary reader. - possible_binary_reader = find_binary(text_stream) - - # If that's not possible, silently use the original reader - # and get mojibake instead of exceptions. - if possible_binary_reader is None: - return text_stream - - binary_reader = possible_binary_reader - - # Default errors to replace instead of strict in order to get - # something that works. - if errors is None: - errors = "replace" - - # Wrap the binary stream in a text stream with the correct - # encoding parameters. 
- return _make_text_stream( - binary_reader, - encoding, - errors, - force_readable=force_readable, - force_writable=force_writable, - ) - - -def _force_correct_text_reader( - text_reader: t.IO[t.Any], - encoding: t.Optional[str], - errors: t.Optional[str], - force_readable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_reader, - encoding, - errors, - _is_binary_reader, - _find_binary_reader, - force_readable=force_readable, - ) - - -def _force_correct_text_writer( - text_writer: t.IO[t.Any], - encoding: t.Optional[str], - errors: t.Optional[str], - force_writable: bool = False, -) -> t.TextIO: - return _force_correct_text_stream( - text_writer, - encoding, - errors, - _is_binary_writer, - _find_binary_writer, - force_writable=force_writable, - ) - - -def get_binary_stdin() -> t.BinaryIO: - reader = _find_binary_reader(sys.stdin) - if reader is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdin.") - return reader - - -def get_binary_stdout() -> t.BinaryIO: - writer = _find_binary_writer(sys.stdout) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stdout.") - return writer - - -def get_binary_stderr() -> t.BinaryIO: - writer = _find_binary_writer(sys.stderr) - if writer is None: - raise RuntimeError("Was not able to determine binary stream for sys.stderr.") - return writer - - -def get_text_stdin( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdin, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_reader(sys.stdin, encoding, errors, force_readable=True) - - -def get_text_stdout( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stdout, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stdout, encoding, errors, force_writable=True) - - -def get_text_stderr( - encoding: t.Optional[str] = None, errors: t.Optional[str] = None -) -> t.TextIO: - rv = _get_windows_console_stream(sys.stderr, encoding, errors) - if rv is not None: - return rv - return _force_correct_text_writer(sys.stderr, encoding, errors, force_writable=True) - - -def _wrap_io_open( - file: t.Union[str, "os.PathLike[str]", int], - mode: str, - encoding: t.Optional[str], - errors: t.Optional[str], -) -> t.IO[t.Any]: - """Handles not passing ``encoding`` and ``errors`` in binary mode.""" - if "b" in mode: - return open(file, mode) - - return open(file, mode, encoding=encoding, errors=errors) - - -def open_stream( - filename: "t.Union[str, os.PathLike[str]]", - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - atomic: bool = False, -) -> t.Tuple[t.IO[t.Any], bool]: - binary = "b" in mode - filename = os.fspath(filename) - - # Standard streams first. These are simple because they ignore the - # atomic flag. Use fsdecode to handle Path("-"). - if os.fsdecode(filename) == "-": - if any(m in mode for m in ["w", "a", "x"]): - if binary: - return get_binary_stdout(), False - return get_text_stdout(encoding=encoding, errors=errors), False - if binary: - return get_binary_stdin(), False - return get_text_stdin(encoding=encoding, errors=errors), False - - # Non-atomic writes directly go out through the regular open functions. 
- if not atomic: - return _wrap_io_open(filename, mode, encoding, errors), True - - # Some usability stuff for atomic writes - if "a" in mode: - raise ValueError( - "Appending to an existing file is not supported, because that" - " would involve an expensive `copy`-operation to a temporary" - " file. Open the file in normal `w`-mode and copy explicitly" - " if that's what you're after." - ) - if "x" in mode: - raise ValueError("Use the `overwrite`-parameter instead.") - if "w" not in mode: - raise ValueError("Atomic writes only make sense with `w`-mode.") - - # Atomic writes are more complicated. They work by opening a file - # as a proxy in the same folder and then using the fdopen - # functionality to wrap it in a Python file. Then we wrap it in an - # atomic file that moves the file over on close. - import errno - import random - - try: - perm: t.Optional[int] = os.stat(filename).st_mode - except OSError: - perm = None - - flags = os.O_RDWR | os.O_CREAT | os.O_EXCL - - if binary: - flags |= getattr(os, "O_BINARY", 0) - - while True: - tmp_filename = os.path.join( - os.path.dirname(filename), - f".__atomic-write{random.randrange(1 << 32):08x}", - ) - try: - fd = os.open(tmp_filename, flags, 0o666 if perm is None else perm) - break - except OSError as e: - if e.errno == errno.EEXIST or ( - os.name == "nt" - and e.errno == errno.EACCES - and os.path.isdir(e.filename) - and os.access(e.filename, os.W_OK) - ): - continue - raise - - if perm is not None: - os.chmod(tmp_filename, perm) # in case perm includes bits in umask - - f = _wrap_io_open(fd, mode, encoding, errors) - af = _AtomicFile(f, tmp_filename, os.path.realpath(filename)) - return t.cast(t.IO[t.Any], af), True - - -class _AtomicFile: - def __init__(self, f: t.IO[t.Any], tmp_filename: str, real_filename: str) -> None: - self._f = f - self._tmp_filename = tmp_filename - self._real_filename = real_filename - self.closed = False - - @property - def name(self) -> str: - return self._real_filename - - def close(self, delete: bool = False) -> None: - if self.closed: - return - self._f.close() - os.replace(self._tmp_filename, self._real_filename) - self.closed = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._f, name) - - def __enter__(self) -> "_AtomicFile": - return self - - def __exit__(self, exc_type: t.Optional[t.Type[BaseException]], *_: t.Any) -> None: - self.close(delete=exc_type is not None) - - def __repr__(self) -> str: - return repr(self._f) - - -def strip_ansi(value: str) -> str: - return _ansi_re.sub("", value) - - -def _is_jupyter_kernel_output(stream: t.IO[t.Any]) -> bool: - while isinstance(stream, (_FixupStream, _NonClosingTextIOWrapper)): - stream = stream._stream - - return stream.__class__.__module__.startswith("ipykernel.") - - -def should_strip_ansi( - stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None -) -> bool: - if color is None: - if stream is None: - stream = sys.stdin - return not isatty(stream) and not _is_jupyter_kernel_output(stream) - return not color - - -# On Windows, wrap the output streams with colorama to support ANSI -# color codes. 
-# NOTE: double check is needed so mypy does not analyze this on Linux -if sys.platform.startswith("win") and WIN: - from ._winconsole import _get_windows_console_stream - - def _get_argv_encoding() -> str: - import locale - - return locale.getpreferredencoding() - - _ansi_stream_wrappers: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def auto_wrap_for_ansi( # noqa: F811 - stream: t.TextIO, color: t.Optional[bool] = None - ) -> t.TextIO: - """Support ANSI color and style codes on Windows by wrapping a - stream with colorama. - """ - try: - cached = _ansi_stream_wrappers.get(stream) - except Exception: - cached = None - - if cached is not None: - return cached - - import colorama - - strip = should_strip_ansi(stream, color) - ansi_wrapper = colorama.AnsiToWin32(stream, strip=strip) - rv = t.cast(t.TextIO, ansi_wrapper.stream) - _write = rv.write - - def _safe_write(s): - try: - return _write(s) - except BaseException: - ansi_wrapper.reset_all() - raise - - rv.write = _safe_write - - try: - _ansi_stream_wrappers[stream] = rv - except Exception: - pass - - return rv - -else: - - def _get_argv_encoding() -> str: - return getattr(sys.stdin, "encoding", None) or sys.getfilesystemencoding() - - def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] - ) -> t.Optional[t.TextIO]: - return None - - -def term_len(x: str) -> int: - return len(strip_ansi(x)) - - -def isatty(stream: t.IO[t.Any]) -> bool: - try: - return stream.isatty() - except Exception: - return False - - -def _make_cached_stream_func( - src_func: t.Callable[[], t.Optional[t.TextIO]], - wrapper_func: t.Callable[[], t.TextIO], -) -> t.Callable[[], t.Optional[t.TextIO]]: - cache: t.MutableMapping[t.TextIO, t.TextIO] = WeakKeyDictionary() - - def func() -> t.Optional[t.TextIO]: - stream = src_func() - - if stream is None: - return None - - try: - rv = cache.get(stream) - except Exception: - rv = None - if rv is not None: - return rv - rv = wrapper_func() - try: - cache[stream] = rv - except Exception: - pass - return rv - - return func - - -_default_text_stdin = _make_cached_stream_func(lambda: sys.stdin, get_text_stdin) -_default_text_stdout = _make_cached_stream_func(lambda: sys.stdout, get_text_stdout) -_default_text_stderr = _make_cached_stream_func(lambda: sys.stderr, get_text_stderr) - - -binary_streams: t.Mapping[str, t.Callable[[], t.BinaryIO]] = { - "stdin": get_binary_stdin, - "stdout": get_binary_stdout, - "stderr": get_binary_stderr, -} - -text_streams: t.Mapping[ - str, t.Callable[[t.Optional[str], t.Optional[str]], t.TextIO] -] = { - "stdin": get_text_stdin, - "stdout": get_text_stdout, - "stderr": get_text_stderr, -} diff --git a/myenv/Lib/site-packages/click/_termui_impl.py b/myenv/Lib/site-packages/click/_termui_impl.py deleted file mode 100644 index f744657..0000000 --- a/myenv/Lib/site-packages/click/_termui_impl.py +++ /dev/null @@ -1,739 +0,0 @@ -""" -This module contains implementations for the termui module. To keep the -import time of Click down, some infrequently used functionality is -placed in this module and only imported as needed. 
-""" -import contextlib -import math -import os -import sys -import time -import typing as t -from gettext import gettext as _ -from io import StringIO -from types import TracebackType - -from ._compat import _default_text_stdout -from ._compat import CYGWIN -from ._compat import get_best_encoding -from ._compat import isatty -from ._compat import open_stream -from ._compat import strip_ansi -from ._compat import term_len -from ._compat import WIN -from .exceptions import ClickException -from .utils import echo - -V = t.TypeVar("V") - -if os.name == "nt": - BEFORE_BAR = "\r" - AFTER_BAR = "\n" -else: - BEFORE_BAR = "\r\033[?25l" - AFTER_BAR = "\033[?25h\n" - - -class ProgressBar(t.Generic[V]): - def __init__( - self, - iterable: t.Optional[t.Iterable[V]], - length: t.Optional[int] = None, - fill_char: str = "#", - empty_char: str = " ", - bar_template: str = "%(bar)s", - info_sep: str = " ", - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - label: t.Optional[str] = None, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, - width: int = 30, - ) -> None: - self.fill_char = fill_char - self.empty_char = empty_char - self.bar_template = bar_template - self.info_sep = info_sep - self.show_eta = show_eta - self.show_percent = show_percent - self.show_pos = show_pos - self.item_show_func = item_show_func - self.label: str = label or "" - - if file is None: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - file = StringIO() - - self.file = file - self.color = color - self.update_min_steps = update_min_steps - self._completed_intervals = 0 - self.width: int = width - self.autowidth: bool = width == 0 - - if length is None: - from operator import length_hint - - length = length_hint(iterable, -1) - - if length == -1: - length = None - if iterable is None: - if length is None: - raise TypeError("iterable or length is required") - iterable = t.cast(t.Iterable[V], range(length)) - self.iter: t.Iterable[V] = iter(iterable) - self.length = length - self.pos = 0 - self.avg: t.List[float] = [] - self.last_eta: float - self.start: float - self.start = self.last_eta = time.time() - self.eta_known: bool = False - self.finished: bool = False - self.max_width: t.Optional[int] = None - self.entered: bool = False - self.current_item: t.Optional[V] = None - self.is_hidden: bool = not isatty(self.file) - self._last_line: t.Optional[str] = None - - def __enter__(self) -> "ProgressBar[V]": - self.entered = True - self.render_progress() - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - self.render_finish() - - def __iter__(self) -> t.Iterator[V]: - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - self.render_progress() - return self.generator() - - def __next__(self) -> V: - # Iteration is defined in terms of a generator function, - # returned by iter(self); use that to define next(). This works - # because `self.iter` is an iterable consumed by that generator, - # so it is re-entry safe. Calling `next(self.generator())` - # twice works and does "what you want". 
- return next(iter(self)) - - def render_finish(self) -> None: - if self.is_hidden: - return - self.file.write(AFTER_BAR) - self.file.flush() - - @property - def pct(self) -> float: - if self.finished: - return 1.0 - return min(self.pos / (float(self.length or 1) or 1), 1.0) - - @property - def time_per_iteration(self) -> float: - if not self.avg: - return 0.0 - return sum(self.avg) / float(len(self.avg)) - - @property - def eta(self) -> float: - if self.length is not None and not self.finished: - return self.time_per_iteration * (self.length - self.pos) - return 0.0 - - def format_eta(self) -> str: - if self.eta_known: - t = int(self.eta) - seconds = t % 60 - t //= 60 - minutes = t % 60 - t //= 60 - hours = t % 24 - t //= 24 - if t > 0: - return f"{t}d {hours:02}:{minutes:02}:{seconds:02}" - else: - return f"{hours:02}:{minutes:02}:{seconds:02}" - return "" - - def format_pos(self) -> str: - pos = str(self.pos) - if self.length is not None: - pos += f"/{self.length}" - return pos - - def format_pct(self) -> str: - return f"{int(self.pct * 100): 4}%"[1:] - - def format_bar(self) -> str: - if self.length is not None: - bar_length = int(self.pct * self.width) - bar = self.fill_char * bar_length - bar += self.empty_char * (self.width - bar_length) - elif self.finished: - bar = self.fill_char * self.width - else: - chars = list(self.empty_char * (self.width or 1)) - if self.time_per_iteration != 0: - chars[ - int( - (math.cos(self.pos * self.time_per_iteration) / 2.0 + 0.5) - * self.width - ) - ] = self.fill_char - bar = "".join(chars) - return bar - - def format_progress_line(self) -> str: - show_percent = self.show_percent - - info_bits = [] - if self.length is not None and show_percent is None: - show_percent = not self.show_pos - - if self.show_pos: - info_bits.append(self.format_pos()) - if show_percent: - info_bits.append(self.format_pct()) - if self.show_eta and self.eta_known and not self.finished: - info_bits.append(self.format_eta()) - if self.item_show_func is not None: - item_info = self.item_show_func(self.current_item) - if item_info is not None: - info_bits.append(item_info) - - return ( - self.bar_template - % { - "label": self.label, - "bar": self.format_bar(), - "info": self.info_sep.join(info_bits), - } - ).rstrip() - - def render_progress(self) -> None: - import shutil - - if self.is_hidden: - # Only output the label as it changes if the output is not a - # TTY. Use file=stderr if you expect to be piping stdout. - if self._last_line != self.label: - self._last_line = self.label - echo(self.label, file=self.file, color=self.color) - - return - - buf = [] - # Update width in case the terminal has been resized - if self.autowidth: - old_width = self.width - self.width = 0 - clutter_length = term_len(self.format_progress_line()) - new_width = max(0, shutil.get_terminal_size().columns - clutter_length) - if new_width < old_width: - buf.append(BEFORE_BAR) - buf.append(" " * self.max_width) # type: ignore - self.max_width = new_width - self.width = new_width - - clear_width = self.width - if self.max_width is not None: - clear_width = self.max_width - - buf.append(BEFORE_BAR) - line = self.format_progress_line() - line_len = term_len(line) - if self.max_width is None or self.max_width < line_len: - self.max_width = line_len - - buf.append(line) - buf.append(" " * (clear_width - line_len)) - line = "".join(buf) - # Render the line only if it changed. 
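
# The ETA arithmetic above in isolation (values illustrative): a rolling
# average of at most seven per-step times, multiplied by the steps remaining.
avg = [0.5, 0.4, 0.6]                              # recent seconds per step
length, pos = 100, 70
time_per_iteration = sum(avg) / len(avg)           # 0.5
eta_seconds = time_per_iteration * (length - pos)  # 15.0
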
- - if line != self._last_line: - self._last_line = line - echo(line, file=self.file, color=self.color, nl=False) - self.file.flush() - - def make_step(self, n_steps: int) -> None: - self.pos += n_steps - if self.length is not None and self.pos >= self.length: - self.finished = True - - if (time.time() - self.last_eta) < 1.0: - return - - self.last_eta = time.time() - - # self.avg is a rolling list of length <= 7 of steps where steps are - # defined as time elapsed divided by the total progress through - # self.length. - if self.pos: - step = (time.time() - self.start) / self.pos - else: - step = time.time() - self.start - - self.avg = self.avg[-6:] + [step] - - self.eta_known = self.length is not None - - def update(self, n_steps: int, current_item: t.Optional[V] = None) -> None: - """Update the progress bar by advancing a specified number of - steps, and optionally set the ``current_item`` for this new - position. - - :param n_steps: Number of steps to advance. - :param current_item: Optional item to set as ``current_item`` - for the updated position. - - .. versionchanged:: 8.0 - Added the ``current_item`` optional parameter. - - .. versionchanged:: 8.0 - Only render when the number of steps meets the - ``update_min_steps`` threshold. - """ - if current_item is not None: - self.current_item = current_item - - self._completed_intervals += n_steps - - if self._completed_intervals >= self.update_min_steps: - self.make_step(self._completed_intervals) - self.render_progress() - self._completed_intervals = 0 - - def finish(self) -> None: - self.eta_known = False - self.current_item = None - self.finished = True - - def generator(self) -> t.Iterator[V]: - """Return a generator which yields the items added to the bar - during construction, and updates the progress bar *after* the - yielded block returns. - """ - # WARNING: the iterator interface for `ProgressBar` relies on - # this and only works because this is a simple generator which - # doesn't create or manage additional state. If this function - # changes, the impact should be evaluated both against - # `iter(bar)` and `next(bar)`. `next()` in particular may call - # `self.generator()` repeatedly, and this must remain safe in - # order for that interface to work. - if not self.entered: - raise RuntimeError("You need to use progress bars in a with block.") - - if self.is_hidden: - yield from self.iter - else: - for rv in self.iter: - self.current_item = rv - - # This allows show_item_func to be updated before the - # item is processed. Only trigger at the beginning of - # the update interval. - if self._completed_intervals == 0: - self.render_progress() - - yield rv - self.update(1) - - self.finish() - self.render_progress() - - -def pager(generator: t.Iterable[str], color: t.Optional[bool] = None) -> None: - """Decide what method to use for paging through text.""" - stdout = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. 
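
# A minimal sketch of driving update() by hand when only a total is known,
# via the public click.progressbar(length=...) form (chunk sizes illustrative).
import click

with click.progressbar(length=4096, label="download") as bar:
    for chunk in (1024, 1024, 1024, 1024):
        bar.update(chunk)
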
- if stdout is None: - stdout = StringIO() - - if not isatty(sys.stdin) or not isatty(stdout): - return _nullpager(stdout, generator, color) - pager_cmd = (os.environ.get("PAGER", None) or "").strip() - if pager_cmd: - if WIN: - return _tempfilepager(generator, pager_cmd, color) - return _pipepager(generator, pager_cmd, color) - if os.environ.get("TERM") in ("dumb", "emacs"): - return _nullpager(stdout, generator, color) - if WIN or sys.platform.startswith("os2"): - return _tempfilepager(generator, "more <", color) - if hasattr(os, "system") and os.system("(less) 2>/dev/null") == 0: - return _pipepager(generator, "less", color) - - import tempfile - - fd, filename = tempfile.mkstemp() - os.close(fd) - try: - if hasattr(os, "system") and os.system(f'more "{filename}"') == 0: - return _pipepager(generator, "more", color) - return _nullpager(stdout, generator, color) - finally: - os.unlink(filename) - - -def _pipepager(generator: t.Iterable[str], cmd: str, color: t.Optional[bool]) -> None: - """Page through text by feeding it to another program. Invoking a - pager through this might support colors. - """ - import subprocess - - env = dict(os.environ) - - # If we're piping to less we might support colors under the - # condition that - cmd_detail = cmd.rsplit("/", 1)[-1].split() - if color is None and cmd_detail[0] == "less": - less_flags = f"{os.environ.get('LESS', '')}{' '.join(cmd_detail[1:])}" - if not less_flags: - env["LESS"] = "-R" - color = True - elif "r" in less_flags or "R" in less_flags: - color = True - - c = subprocess.Popen(cmd, shell=True, stdin=subprocess.PIPE, env=env) - stdin = t.cast(t.BinaryIO, c.stdin) - encoding = get_best_encoding(stdin) - try: - for text in generator: - if not color: - text = strip_ansi(text) - - stdin.write(text.encode(encoding, "replace")) - except (OSError, KeyboardInterrupt): - pass - else: - stdin.close() - - # Less doesn't respect ^C, but catches it for its own UI purposes (aborting - # search or other commands inside less). - # - # That means when the user hits ^C, the parent process (click) terminates, - # but less is still alive, paging the output and messing up the terminal. - # - # If the user wants to make the pager exit on ^C, they should set - # `LESS='-K'`. It's not our decision to make. - while True: - try: - c.wait() - except KeyboardInterrupt: - pass - else: - break - - -def _tempfilepager( - generator: t.Iterable[str], cmd: str, color: t.Optional[bool] -) -> None: - """Page through text by invoking a program on a temporary file.""" - import tempfile - - fd, filename = tempfile.mkstemp() - # TODO: This never terminates if the passed generator never terminates. - text = "".join(generator) - if not color: - text = strip_ansi(text) - encoding = get_best_encoding(sys.stdout) - with open_stream(filename, "wb")[0] as f: - f.write(text.encode(encoding)) - try: - os.system(f'{cmd} "{filename}"') - finally: - os.close(fd) - os.unlink(filename) - - -def _nullpager( - stream: t.TextIO, generator: t.Iterable[str], color: t.Optional[bool] -) -> None: - """Simply print unformatted text. 
This is the ultimate fallback.""" - for text in generator: - if not color: - text = strip_ansi(text) - stream.write(text) - - -class Editor: - def __init__( - self, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - ) -> None: - self.editor = editor - self.env = env - self.require_save = require_save - self.extension = extension - - def get_editor(self) -> str: - if self.editor is not None: - return self.editor - for key in "VISUAL", "EDITOR": - rv = os.environ.get(key) - if rv: - return rv - if WIN: - return "notepad" - for editor in "sensible-editor", "vim", "nano": - if os.system(f"which {editor} >/dev/null 2>&1") == 0: - return editor - return "vi" - - def edit_file(self, filename: str) -> None: - import subprocess - - editor = self.get_editor() - environ: t.Optional[t.Dict[str, str]] = None - - if self.env: - environ = os.environ.copy() - environ.update(self.env) - - try: - c = subprocess.Popen(f'{editor} "{filename}"', env=environ, shell=True) - exit_code = c.wait() - if exit_code != 0: - raise ClickException( - _("{editor}: Editing failed").format(editor=editor) - ) - except OSError as e: - raise ClickException( - _("{editor}: Editing failed: {e}").format(editor=editor, e=e) - ) from e - - def edit(self, text: t.Optional[t.AnyStr]) -> t.Optional[t.AnyStr]: - import tempfile - - if not text: - data = b"" - elif isinstance(text, (bytes, bytearray)): - data = text - else: - if text and not text.endswith("\n"): - text += "\n" - - if WIN: - data = text.replace("\n", "\r\n").encode("utf-8-sig") - else: - data = text.encode("utf-8") - - fd, name = tempfile.mkstemp(prefix="editor-", suffix=self.extension) - f: t.BinaryIO - - try: - with os.fdopen(fd, "wb") as f: - f.write(data) - - # If the filesystem resolution is 1 second, like Mac OS - # 10.12 Extended, or 2 seconds, like FAT32, and the editor - # closes very fast, require_save can fail. Set the modified - # time to be 2 seconds in the past to work around this. - os.utime(name, (os.path.getatime(name), os.path.getmtime(name) - 2)) - # Depending on the resolution, the exact value might not be - # recorded, so get the new recorded value. 
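
# A minimal sketch: this class backs the public click.edit() helper, which
# returns None when require_save is set and the user quits without saving.
import click

message = click.edit("Draft your message here.\n")
if message is not None:
    print(message)
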
- timestamp = os.path.getmtime(name) - - self.edit_file(name) - - if self.require_save and os.path.getmtime(name) == timestamp: - return None - - with open(name, "rb") as f: - rv = f.read() - - if isinstance(text, (bytes, bytearray)): - return rv - - return rv.decode("utf-8-sig").replace("\r\n", "\n") # type: ignore - finally: - os.unlink(name) - - -def open_url(url: str, wait: bool = False, locate: bool = False) -> int: - import subprocess - - def _unquote_file(url: str) -> str: - from urllib.parse import unquote - - if url.startswith("file://"): - url = unquote(url[7:]) - - return url - - if sys.platform == "darwin": - args = ["open"] - if wait: - args.append("-W") - if locate: - args.append("-R") - args.append(_unquote_file(url)) - null = open("/dev/null", "w") - try: - return subprocess.Popen(args, stderr=null).wait() - finally: - null.close() - elif WIN: - if locate: - url = _unquote_file(url.replace('"', "")) - args = f'explorer /select,"{url}"' - else: - url = url.replace('"', "") - wait_str = "/WAIT" if wait else "" - args = f'start {wait_str} "" "{url}"' - return os.system(args) - elif CYGWIN: - if locate: - url = os.path.dirname(_unquote_file(url).replace('"', "")) - args = f'cygstart "{url}"' - else: - url = url.replace('"', "") - wait_str = "-w" if wait else "" - args = f'cygstart {wait_str} "{url}"' - return os.system(args) - - try: - if locate: - url = os.path.dirname(_unquote_file(url)) or "." - else: - url = _unquote_file(url) - c = subprocess.Popen(["xdg-open", url]) - if wait: - return c.wait() - return 0 - except OSError: - if url.startswith(("http://", "https://")) and not locate and not wait: - import webbrowser - - webbrowser.open(url) - return 0 - return 1 - - -def _translate_ch_to_exc(ch: str) -> t.Optional[BaseException]: - if ch == "\x03": - raise KeyboardInterrupt() - - if ch == "\x04" and not WIN: # Unix-like, Ctrl+D - raise EOFError() - - if ch == "\x1a" and WIN: # Windows, Ctrl+Z - raise EOFError() - - return None - - -if WIN: - import msvcrt - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - yield -1 - - def getchar(echo: bool) -> str: - # The function `getch` will return a bytes object corresponding to - # the pressed character. Since Windows 10 build 1803, it will also - # return \x00 when called a second time after pressing a regular key. - # - # `getwch` does not share this probably-bugged behavior. Moreover, it - # returns a Unicode object by default, which is what we want. - # - # Either of these functions will return \x00 or \xe0 to indicate - # a special key, and you need to call the same function again to get - # the "rest" of the code. The fun part is that \u00e0 is - # "latin small letter a with grave", so if you type that on a French - # keyboard, you _also_ get a \xe0. - # E.g., consider the Up arrow. This returns \xe0 and then \x48. The - # resulting Unicode string reads as "a with grave" + "capital H". - # This is indistinguishable from when the user actually types - # "a with grave" and then "capital H". - # - # When \xe0 is returned, we assume it's part of a special-key sequence - # and call `getwch` again, but that means that when the user types - # the \u00e0 character, `getchar` doesn't return until a second - # character is typed. - # The alternative is returning immediately, but that would mess up - # cross-platform handling of arrow keys and others that start with - # \xe0. 
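
# A minimal sketch: open_url() backs the public click.launch(); locate=True
# opens the containing folder with the file preselected ("report.txt" is an
# illustrative name).
import click

click.launch("https://click.palletsprojects.com/")
click.launch("report.txt", locate=True)
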
Another option is using `getch`, but then we can't reliably - # read non-ASCII characters, because return values of `getch` are - # limited to the current 8-bit codepage. - # - # Anyway, Click doesn't claim to do this Right(tm), and using `getwch` - # is doing the right thing in more situations than with `getch`. - func: t.Callable[[], str] - - if echo: - func = msvcrt.getwche # type: ignore - else: - func = msvcrt.getwch # type: ignore - - rv = func() - - if rv in ("\x00", "\xe0"): - # \x00 and \xe0 are control characters that indicate special key, - # see above. - rv += func() - - _translate_ch_to_exc(rv) - return rv - -else: - import tty - import termios - - @contextlib.contextmanager - def raw_terminal() -> t.Iterator[int]: - f: t.Optional[t.TextIO] - fd: int - - if not isatty(sys.stdin): - f = open("/dev/tty") - fd = f.fileno() - else: - fd = sys.stdin.fileno() - f = None - - try: - old_settings = termios.tcgetattr(fd) - - try: - tty.setraw(fd) - yield fd - finally: - termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) - sys.stdout.flush() - - if f is not None: - f.close() - except termios.error: - pass - - def getchar(echo: bool) -> str: - with raw_terminal() as fd: - ch = os.read(fd, 32).decode(get_best_encoding(sys.stdin), "replace") - - if echo and isatty(sys.stdout): - sys.stdout.write(ch) - - _translate_ch_to_exc(ch) - return ch diff --git a/myenv/Lib/site-packages/click/_textwrap.py b/myenv/Lib/site-packages/click/_textwrap.py deleted file mode 100644 index b47dcbd..0000000 --- a/myenv/Lib/site-packages/click/_textwrap.py +++ /dev/null @@ -1,49 +0,0 @@ -import textwrap -import typing as t -from contextlib import contextmanager - - -class TextWrapper(textwrap.TextWrapper): - def _handle_long_word( - self, - reversed_chunks: t.List[str], - cur_line: t.List[str], - cur_len: int, - width: int, - ) -> None: - space_left = max(width - cur_len, 1) - - if self.break_long_words: - last = reversed_chunks[-1] - cut = last[:space_left] - res = last[space_left:] - cur_line.append(cut) - reversed_chunks[-1] = res - elif not cur_line: - cur_line.append(reversed_chunks.pop()) - - @contextmanager - def extra_indent(self, indent: str) -> t.Iterator[None]: - old_initial_indent = self.initial_indent - old_subsequent_indent = self.subsequent_indent - self.initial_indent += indent - self.subsequent_indent += indent - - try: - yield - finally: - self.initial_indent = old_initial_indent - self.subsequent_indent = old_subsequent_indent - - def indent_only(self, text: str) -> str: - rv = [] - - for idx, line in enumerate(text.splitlines()): - indent = self.initial_indent - - if idx > 0: - indent = self.subsequent_indent - - rv.append(f"{indent}{line}") - - return "\n".join(rv) diff --git a/myenv/Lib/site-packages/click/_winconsole.py b/myenv/Lib/site-packages/click/_winconsole.py deleted file mode 100644 index 6b20df3..0000000 --- a/myenv/Lib/site-packages/click/_winconsole.py +++ /dev/null @@ -1,279 +0,0 @@ -# This module is based on the excellent work by Adam Bartoš who -# provided a lot of what went into the implementation here in -# the discussion to issue1602 in the Python bug tracker. -# -# There are some general differences in regards to how this works -# compared to the original patches as we do not need to patch -# the entire interpreter but just work in our little world of -# echo and prompt. 
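
# A minimal sketch: both platform branches surface as click.getchar(), which
# reads one keypress without waiting for Enter and converts Ctrl+C/Ctrl+D
# (Ctrl+Z on Windows) into exceptions via _translate_ch_to_exc().
import click

click.echo("Continue? [y/N] ", nl=False)
ch = click.getchar()
click.echo(ch)
if ch.lower() != "y":
    raise SystemExit(1)
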
-import io -import sys -import time -import typing as t -from ctypes import byref -from ctypes import c_char -from ctypes import c_char_p -from ctypes import c_int -from ctypes import c_ssize_t -from ctypes import c_ulong -from ctypes import c_void_p -from ctypes import POINTER -from ctypes import py_object -from ctypes import Structure -from ctypes.wintypes import DWORD -from ctypes.wintypes import HANDLE -from ctypes.wintypes import LPCWSTR -from ctypes.wintypes import LPWSTR - -from ._compat import _NonClosingTextIOWrapper - -assert sys.platform == "win32" -import msvcrt # noqa: E402 -from ctypes import windll # noqa: E402 -from ctypes import WINFUNCTYPE # noqa: E402 - -c_ssize_p = POINTER(c_ssize_t) - -kernel32 = windll.kernel32 -GetStdHandle = kernel32.GetStdHandle -ReadConsoleW = kernel32.ReadConsoleW -WriteConsoleW = kernel32.WriteConsoleW -GetConsoleMode = kernel32.GetConsoleMode -GetLastError = kernel32.GetLastError -GetCommandLineW = WINFUNCTYPE(LPWSTR)(("GetCommandLineW", windll.kernel32)) -CommandLineToArgvW = WINFUNCTYPE(POINTER(LPWSTR), LPCWSTR, POINTER(c_int))( - ("CommandLineToArgvW", windll.shell32) -) -LocalFree = WINFUNCTYPE(c_void_p, c_void_p)(("LocalFree", windll.kernel32)) - -STDIN_HANDLE = GetStdHandle(-10) -STDOUT_HANDLE = GetStdHandle(-11) -STDERR_HANDLE = GetStdHandle(-12) - -PyBUF_SIMPLE = 0 -PyBUF_WRITABLE = 1 - -ERROR_SUCCESS = 0 -ERROR_NOT_ENOUGH_MEMORY = 8 -ERROR_OPERATION_ABORTED = 995 - -STDIN_FILENO = 0 -STDOUT_FILENO = 1 -STDERR_FILENO = 2 - -EOF = b"\x1a" -MAX_BYTES_WRITTEN = 32767 - -try: - from ctypes import pythonapi -except ImportError: - # On PyPy we cannot get buffers so our ability to operate here is - # severely limited. - get_buffer = None -else: - - class Py_buffer(Structure): - _fields_ = [ - ("buf", c_void_p), - ("obj", py_object), - ("len", c_ssize_t), - ("itemsize", c_ssize_t), - ("readonly", c_int), - ("ndim", c_int), - ("format", c_char_p), - ("shape", c_ssize_p), - ("strides", c_ssize_p), - ("suboffsets", c_ssize_p), - ("internal", c_void_p), - ] - - PyObject_GetBuffer = pythonapi.PyObject_GetBuffer - PyBuffer_Release = pythonapi.PyBuffer_Release - - def get_buffer(obj, writable=False): - buf = Py_buffer() - flags = PyBUF_WRITABLE if writable else PyBUF_SIMPLE - PyObject_GetBuffer(py_object(obj), byref(buf), flags) - - try: - buffer_type = c_char * buf.len - return buffer_type.from_address(buf.buf) - finally: - PyBuffer_Release(byref(buf)) - - -class _WindowsConsoleRawIOBase(io.RawIOBase): - def __init__(self, handle): - self.handle = handle - - def isatty(self): - super().isatty() - return True - - -class _WindowsConsoleReader(_WindowsConsoleRawIOBase): - def readable(self): - return True - - def readinto(self, b): - bytes_to_be_read = len(b) - if not bytes_to_be_read: - return 0 - elif bytes_to_be_read % 2: - raise ValueError( - "cannot read odd number of bytes from UTF-16-LE encoded console" - ) - - buffer = get_buffer(b, writable=True) - code_units_to_be_read = bytes_to_be_read // 2 - code_units_read = c_ulong() - - rv = ReadConsoleW( - HANDLE(self.handle), - buffer, - code_units_to_be_read, - byref(code_units_read), - None, - ) - if GetLastError() == ERROR_OPERATION_ABORTED: - # wait for KeyboardInterrupt - time.sleep(0.1) - if not rv: - raise OSError(f"Windows error: {GetLastError()}") - - if buffer[0] == EOF: - return 0 - return 2 * code_units_read.value - - -class _WindowsConsoleWriter(_WindowsConsoleRawIOBase): - def writable(self): - return True - - @staticmethod - def _get_error_message(errno): - if errno == ERROR_SUCCESS: - 
return "ERROR_SUCCESS" - elif errno == ERROR_NOT_ENOUGH_MEMORY: - return "ERROR_NOT_ENOUGH_MEMORY" - return f"Windows error {errno}" - - def write(self, b): - bytes_to_be_written = len(b) - buf = get_buffer(b) - code_units_to_be_written = min(bytes_to_be_written, MAX_BYTES_WRITTEN) // 2 - code_units_written = c_ulong() - - WriteConsoleW( - HANDLE(self.handle), - buf, - code_units_to_be_written, - byref(code_units_written), - None, - ) - bytes_written = 2 * code_units_written.value - - if bytes_written == 0 and bytes_to_be_written > 0: - raise OSError(self._get_error_message(GetLastError())) - return bytes_written - - -class ConsoleStream: - def __init__(self, text_stream: t.TextIO, byte_stream: t.BinaryIO) -> None: - self._text_stream = text_stream - self.buffer = byte_stream - - @property - def name(self) -> str: - return self.buffer.name - - def write(self, x: t.AnyStr) -> int: - if isinstance(x, str): - return self._text_stream.write(x) - try: - self.flush() - except Exception: - pass - return self.buffer.write(x) - - def writelines(self, lines: t.Iterable[t.AnyStr]) -> None: - for line in lines: - self.write(line) - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._text_stream, name) - - def isatty(self) -> bool: - return self.buffer.isatty() - - def __repr__(self): - return f"" - - -def _get_text_stdin(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedReader(_WindowsConsoleReader(STDIN_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stdout(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDOUT_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -def _get_text_stderr(buffer_stream: t.BinaryIO) -> t.TextIO: - text_stream = _NonClosingTextIOWrapper( - io.BufferedWriter(_WindowsConsoleWriter(STDERR_HANDLE)), - "utf-16-le", - "strict", - line_buffering=True, - ) - return t.cast(t.TextIO, ConsoleStream(text_stream, buffer_stream)) - - -_stream_factories: t.Mapping[int, t.Callable[[t.BinaryIO], t.TextIO]] = { - 0: _get_text_stdin, - 1: _get_text_stdout, - 2: _get_text_stderr, -} - - -def _is_console(f: t.TextIO) -> bool: - if not hasattr(f, "fileno"): - return False - - try: - fileno = f.fileno() - except (OSError, io.UnsupportedOperation): - return False - - handle = msvcrt.get_osfhandle(fileno) - return bool(GetConsoleMode(handle, byref(DWORD()))) - - -def _get_windows_console_stream( - f: t.TextIO, encoding: t.Optional[str], errors: t.Optional[str] -) -> t.Optional[t.TextIO]: - if ( - get_buffer is not None - and encoding in {"utf-16-le", None} - and errors in {"strict", None} - and _is_console(f) - ): - func = _stream_factories.get(f.fileno()) - if func is not None: - b = getattr(f, "buffer", None) - - if b is None: - return None - - return func(b) diff --git a/myenv/Lib/site-packages/click/core.py b/myenv/Lib/site-packages/click/core.py deleted file mode 100644 index cc65e89..0000000 --- a/myenv/Lib/site-packages/click/core.py +++ /dev/null @@ -1,3042 +0,0 @@ -import enum -import errno -import inspect -import os -import sys -import typing as t -from collections import abc -from contextlib import contextmanager -from contextlib import ExitStack -from functools import update_wrapper -from gettext import gettext as _ -from gettext import ngettext -from itertools 
import repeat -from types import TracebackType - -from . import types -from .exceptions import Abort -from .exceptions import BadParameter -from .exceptions import ClickException -from .exceptions import Exit -from .exceptions import MissingParameter -from .exceptions import UsageError -from .formatting import HelpFormatter -from .formatting import join_options -from .globals import pop_context -from .globals import push_context -from .parser import _flag_needs_value -from .parser import OptionParser -from .parser import split_opt -from .termui import confirm -from .termui import prompt -from .termui import style -from .utils import _detect_program_name -from .utils import _expand_args -from .utils import echo -from .utils import make_default_short_help -from .utils import make_str -from .utils import PacifyFlushWrapper - -if t.TYPE_CHECKING: - import typing_extensions as te - from .shell_completion import CompletionItem - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -V = t.TypeVar("V") - - -def _complete_visible_commands( - ctx: "Context", incomplete: str -) -> t.Iterator[t.Tuple[str, "Command"]]: - """List all the subcommands of a group that start with the - incomplete value and aren't hidden. - - :param ctx: Invocation context for the group. - :param incomplete: Value being completed. May be empty. - """ - multi = t.cast(MultiCommand, ctx.command) - - for name in multi.list_commands(ctx): - if name.startswith(incomplete): - command = multi.get_command(ctx, name) - - if command is not None and not command.hidden: - yield name, command - - -def _check_multicommand( - base_command: "MultiCommand", cmd_name: str, cmd: "Command", register: bool = False -) -> None: - if not base_command.chain or not isinstance(cmd, MultiCommand): - return - if register: - hint = ( - "It is not possible to add multi commands as children to" - " another multi command that is in chain mode." - ) - else: - hint = ( - "Found a multi command as subcommand to a multi command" - " that is in chain mode. This is not supported." - ) - raise RuntimeError( - f"{hint}. Command {base_command.name!r} is set to chain and" - f" {cmd_name!r} was added as a subcommand but it in itself is a" - f" multi command. ({cmd_name!r} is a {type(cmd).__name__}" - f" within a chained {type(base_command).__name__} named" - f" {base_command.name!r})." - ) - - -def batch(iterable: t.Iterable[V], batch_size: int) -> t.List[t.Tuple[V, ...]]: - return list(zip(*repeat(iter(iterable), batch_size))) - - -@contextmanager -def augment_usage_errors( - ctx: "Context", param: t.Optional["Parameter"] = None -) -> t.Iterator[None]: - """Context manager that attaches extra information to exceptions.""" - try: - yield - except BadParameter as e: - if e.ctx is None: - e.ctx = ctx - if param is not None and e.param is None: - e.param = param - raise - except UsageError as e: - if e.ctx is None: - e.ctx = ctx - raise - - -def iter_params_for_processing( - invocation_order: t.Sequence["Parameter"], - declaration_order: t.Sequence["Parameter"], -) -> t.List["Parameter"]: - """Given a sequence of parameters in the order as should be considered - for processing and an iterable of parameters that exist, this returns - a list in the correct order as they should be processed. 
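
# The zip()/repeat() trick in batch() above, in isolation: repeat() yields the
# *same* iterator batch_size times, so zip() pulls consecutive items into each
# tuple and silently drops an incomplete trailing batch.
from itertools import repeat

it = iter(range(7))
assert list(zip(*repeat(it, 3))) == [(0, 1, 2), (3, 4, 5)]  # 6 is dropped
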
- """ - - def sort_key(item: "Parameter") -> t.Tuple[bool, float]: - try: - idx: float = invocation_order.index(item) - except ValueError: - idx = float("inf") - - return not item.is_eager, idx - - return sorted(declaration_order, key=sort_key) - - -class ParameterSource(enum.Enum): - """This is an :class:`~enum.Enum` that indicates the source of a - parameter's value. - - Use :meth:`click.Context.get_parameter_source` to get the - source for a parameter by name. - - .. versionchanged:: 8.0 - Use :class:`~enum.Enum` and drop the ``validate`` method. - - .. versionchanged:: 8.0 - Added the ``PROMPT`` value. - """ - - COMMANDLINE = enum.auto() - """The value was provided by the command line args.""" - ENVIRONMENT = enum.auto() - """The value was provided with an environment variable.""" - DEFAULT = enum.auto() - """Used the default specified by the parameter.""" - DEFAULT_MAP = enum.auto() - """Used a default provided by :attr:`Context.default_map`.""" - PROMPT = enum.auto() - """Used a prompt to confirm a default or provide a value.""" - - -class Context: - """The context is a special internal object that holds state relevant - for the script execution at every single level. It's normally invisible - to commands unless they opt-in to getting access to it. - - The context is useful as it can pass internal objects around and can - control special execution features such as reading data from - environment variables. - - A context can be used as context manager in which case it will call - :meth:`close` on teardown. - - :param command: the command class for this context. - :param parent: the parent context. - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it is usually - the name of the script, for commands below it it's - the name of the script. - :param obj: an arbitrary object of user data. - :param auto_envvar_prefix: the prefix to use for automatic environment - variables. If this is `None` then reading - from environment variables is disabled. This - does not affect manually set environment - variables which are always read. - :param default_map: a dictionary (like object) with default values - for parameters. - :param terminal_width: the width of the terminal. The default is - inherit from parent context. If no context - defines the terminal width then auto - detection will be applied. - :param max_content_width: the maximum width for content rendered by - Click (this currently only affects help - pages). This defaults to 80 characters if - not overridden. In other words: even if the - terminal is larger than that, Click will not - format things wider than 80 characters by - default. In addition to that, formatters might - add some safety mapping on the right. - :param resilient_parsing: if this flag is enabled then Click will - parse without any interactivity or callback - invocation. Default values will also be - ignored. This is useful for implementing - things such as completion support. - :param allow_extra_args: if this is set to `True` then extra arguments - at the end will not raise an error and will be - kept on the context. The default is to inherit - from the command. - :param allow_interspersed_args: if this is set to `False` then options - and arguments cannot be mixed. The - default is to inherit from the command. - :param ignore_unknown_options: instructs click to ignore options it does - not know and keeps them for later - processing. 
- :param help_option_names: optionally a list of strings that define how - the default help parameter is named. The - default is ``['--help']``. - :param token_normalize_func: an optional function that is used to - normalize tokens (options, choices, - etc.). This for instance can be used to - implement case insensitive behavior. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are used in texts that Click prints which is by - default not the case. This for instance would affect - help output. - :param show_default: Show the default value for commands. If this - value is not set, it defaults to the value from the parent - context. ``Command.show_default`` overrides this default for the - specific command. - - .. versionchanged:: 8.1 - The ``show_default`` parameter is overridden by - ``Command.show_default``, instead of the other way around. - - .. versionchanged:: 8.0 - The ``show_default`` parameter defaults to the value from the - parent context. - - .. versionchanged:: 7.1 - Added the ``show_default`` parameter. - - .. versionchanged:: 4.0 - Added the ``color``, ``ignore_unknown_options``, and - ``max_content_width`` parameters. - - .. versionchanged:: 3.0 - Added the ``allow_extra_args`` and ``allow_interspersed_args`` - parameters. - - .. versionchanged:: 2.0 - Added the ``resilient_parsing``, ``help_option_names``, and - ``token_normalize_func`` parameters. - """ - - #: The formatter class to create with :meth:`make_formatter`. - #: - #: .. versionadded:: 8.0 - formatter_class: t.Type["HelpFormatter"] = HelpFormatter - - def __init__( - self, - command: "Command", - parent: t.Optional["Context"] = None, - info_name: t.Optional[str] = None, - obj: t.Optional[t.Any] = None, - auto_envvar_prefix: t.Optional[str] = None, - default_map: t.Optional[t.MutableMapping[str, t.Any]] = None, - terminal_width: t.Optional[int] = None, - max_content_width: t.Optional[int] = None, - resilient_parsing: bool = False, - allow_extra_args: t.Optional[bool] = None, - allow_interspersed_args: t.Optional[bool] = None, - ignore_unknown_options: t.Optional[bool] = None, - help_option_names: t.Optional[t.List[str]] = None, - token_normalize_func: t.Optional[t.Callable[[str], str]] = None, - color: t.Optional[bool] = None, - show_default: t.Optional[bool] = None, - ) -> None: - #: the parent context or `None` if none exists. - self.parent = parent - #: the :class:`Command` for this context. - self.command = command - #: the descriptive information name - self.info_name = info_name - #: Map of parameter names to their parsed values. Parameters - #: with ``expose_value=False`` are not stored. - self.params: t.Dict[str, t.Any] = {} - #: the leftover arguments. - self.args: t.List[str] = [] - #: protected arguments. These are arguments that are prepended - #: to `args` when certain parsing scenarios are encountered but - #: must be never propagated to another arguments. This is used - #: to implement nested parsing. - self.protected_args: t.List[str] = [] - #: the collected prefixes of the command's options. - self._opt_prefixes: t.Set[str] = set(parent._opt_prefixes) if parent else set() - - if obj is None and parent is not None: - obj = parent.obj - - #: the user object stored. - self.obj: t.Any = obj - self._meta: t.Dict[str, t.Any] = getattr(parent, "meta", {}) - - #: A dictionary (-like object) with defaults for parameters. 
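
# A minimal sketch of the default_map cascade resolved just below: the parent
# context's map is narrowed by each subcommand's info_name.
import click

@click.group(context_settings={"default_map": {"runserver": {"port": 5000}}})
def cli():
    pass

@cli.command()
@click.option("--port", default=8000)
def runserver(port):
    click.echo(port)  # prints 5000, taken from the default_map
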
- if ( - default_map is None - and info_name is not None - and parent is not None - and parent.default_map is not None - ): - default_map = parent.default_map.get(info_name) - - self.default_map: t.Optional[t.MutableMapping[str, t.Any]] = default_map - - #: This flag indicates if a subcommand is going to be executed. A - #: group callback can use this information to figure out if it's - #: being executed directly or because the execution flow passes - #: onwards to a subcommand. By default it's None, but it can be - #: the name of the subcommand to execute. - #: - #: If chaining is enabled this will be set to ``'*'`` in case - #: any commands are executed. It is however not possible to - #: figure out which ones. If you require this knowledge you - #: should use a :func:`result_callback`. - self.invoked_subcommand: t.Optional[str] = None - - if terminal_width is None and parent is not None: - terminal_width = parent.terminal_width - - #: The width of the terminal (None is autodetection). - self.terminal_width: t.Optional[int] = terminal_width - - if max_content_width is None and parent is not None: - max_content_width = parent.max_content_width - - #: The maximum width of formatted content (None implies a sensible - #: default which is 80 for most things). - self.max_content_width: t.Optional[int] = max_content_width - - if allow_extra_args is None: - allow_extra_args = command.allow_extra_args - - #: Indicates if the context allows extra args or if it should - #: fail on parsing. - #: - #: .. versionadded:: 3.0 - self.allow_extra_args = allow_extra_args - - if allow_interspersed_args is None: - allow_interspersed_args = command.allow_interspersed_args - - #: Indicates if the context allows mixing of arguments and - #: options or not. - #: - #: .. versionadded:: 3.0 - self.allow_interspersed_args: bool = allow_interspersed_args - - if ignore_unknown_options is None: - ignore_unknown_options = command.ignore_unknown_options - - #: Instructs click to ignore options that a command does not - #: understand and will store it on the context for later - #: processing. This is primarily useful for situations where you - #: want to call into external programs. Generally this pattern is - #: strongly discouraged because it's not possibly to losslessly - #: forward all arguments. - #: - #: .. versionadded:: 4.0 - self.ignore_unknown_options: bool = ignore_unknown_options - - if help_option_names is None: - if parent is not None: - help_option_names = parent.help_option_names - else: - help_option_names = ["--help"] - - #: The names for the help options. - self.help_option_names: t.List[str] = help_option_names - - if token_normalize_func is None and parent is not None: - token_normalize_func = parent.token_normalize_func - - #: An optional normalization function for tokens. This is - #: options, choices, commands etc. - self.token_normalize_func: t.Optional[ - t.Callable[[str], str] - ] = token_normalize_func - - #: Indicates if resilient parsing is enabled. In that case Click - #: will do its best to not cause any failures and default values - #: will be ignored. Useful for completion. - self.resilient_parsing: bool = resilient_parsing - - # If there is no envvar prefix yet, but the parent has one and - # the command on this level has a name, we can expand the envvar - # prefix automatically. 
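
# A minimal sketch of the prefix expansion handled below: with
# auto_envvar_prefix="APP", a subcommand named "serve" reads --port from the
# APP_SERVE_PORT environment variable (dashes in names become underscores).
import click

@click.group()
def cli():
    pass

@cli.command()
@click.option("--port", default=8000)
def serve(port):
    click.echo(port)

if __name__ == "__main__":
    cli(auto_envvar_prefix="APP")  # APP_SERVE_PORT=5000 would override the default
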
- if auto_envvar_prefix is None: - if ( - parent is not None - and parent.auto_envvar_prefix is not None - and self.info_name is not None - ): - auto_envvar_prefix = ( - f"{parent.auto_envvar_prefix}_{self.info_name.upper()}" - ) - else: - auto_envvar_prefix = auto_envvar_prefix.upper() - - if auto_envvar_prefix is not None: - auto_envvar_prefix = auto_envvar_prefix.replace("-", "_") - - self.auto_envvar_prefix: t.Optional[str] = auto_envvar_prefix - - if color is None and parent is not None: - color = parent.color - - #: Controls if styling output is wanted or not. - self.color: t.Optional[bool] = color - - if show_default is None and parent is not None: - show_default = parent.show_default - - #: Show option default values when formatting help text. - self.show_default: t.Optional[bool] = show_default - - self._close_callbacks: t.List[t.Callable[[], t.Any]] = [] - self._depth = 0 - self._parameter_source: t.Dict[str, ParameterSource] = {} - self._exit_stack = ExitStack() - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire CLI - structure. - - .. code-block:: python - - with Context(cli) as ctx: - info = ctx.to_info_dict() - - .. versionadded:: 8.0 - """ - return { - "command": self.command.to_info_dict(self), - "info_name": self.info_name, - "allow_extra_args": self.allow_extra_args, - "allow_interspersed_args": self.allow_interspersed_args, - "ignore_unknown_options": self.ignore_unknown_options, - "auto_envvar_prefix": self.auto_envvar_prefix, - } - - def __enter__(self) -> "Context": - self._depth += 1 - push_context(self) - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - self._depth -= 1 - if self._depth == 0: - self.close() - pop_context() - - @contextmanager - def scope(self, cleanup: bool = True) -> t.Iterator["Context"]: - """This helper method can be used with the context object to promote - it to the current thread local (see :func:`get_current_context`). - The default behavior of this is to invoke the cleanup functions which - can be disabled by setting `cleanup` to `False`. The cleanup - functions are typically used for things such as closing file handles. - - If the cleanup is intended the context object can also be directly - used as a context manager. - - Example usage:: - - with ctx.scope(): - assert get_current_context() is ctx - - This is equivalent:: - - with ctx: - assert get_current_context() is ctx - - .. versionadded:: 5.0 - - :param cleanup: controls if the cleanup functions should be run or - not. The default is to run these functions. In - some situations the context only wants to be - temporarily pushed in which case this can be disabled. - Nested pushes automatically defer the cleanup. - """ - if not cleanup: - self._depth += 1 - try: - with self as rv: - yield rv - finally: - if not cleanup: - self._depth -= 1 - - @property - def meta(self) -> t.Dict[str, t.Any]: - """This is a dictionary which is shared with all the contexts - that are nested. It exists so that click utilities can store some - state here if they need to. It is however the responsibility of - that code to manage this dictionary well. - - The keys are supposed to be unique dotted strings. For instance - module paths are a good choice for it. What is stored in there is - irrelevant for the operation of click. 
However what is important is - that code that places data here adheres to the general semantics of - the system. - - Example usage:: - - LANG_KEY = f'{__name__}.lang' - - def set_language(value): - ctx = get_current_context() - ctx.meta[LANG_KEY] = value - - def get_language(): - return get_current_context().meta.get(LANG_KEY, 'en_US') - - .. versionadded:: 5.0 - """ - return self._meta - - def make_formatter(self) -> HelpFormatter: - """Creates the :class:`~click.HelpFormatter` for the help and - usage output. - - To quickly customize the formatter class used without overriding - this method, set the :attr:`formatter_class` attribute. - - .. versionchanged:: 8.0 - Added the :attr:`formatter_class` attribute. - """ - return self.formatter_class( - width=self.terminal_width, max_width=self.max_content_width - ) - - def with_resource(self, context_manager: t.ContextManager[V]) -> V: - """Register a resource as if it were used in a ``with`` - statement. The resource will be cleaned up when the context is - popped. - - Uses :meth:`contextlib.ExitStack.enter_context`. It calls the - resource's ``__enter__()`` method and returns the result. When - the context is popped, it closes the stack, which calls the - resource's ``__exit__()`` method. - - To register a cleanup function for something that isn't a - context manager, use :meth:`call_on_close`. Or use something - from :mod:`contextlib` to turn it into a context manager first. - - .. code-block:: python - - @click.group() - @click.option("--name") - @click.pass_context - def cli(ctx): - ctx.obj = ctx.with_resource(connect_db(name)) - - :param context_manager: The context manager to enter. - :return: Whatever ``context_manager.__enter__()`` returns. - - .. versionadded:: 8.0 - """ - return self._exit_stack.enter_context(context_manager) - - def call_on_close(self, f: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Register a function to be called when the context tears down. - - This can be used to close resources opened during the script - execution. Resources that support Python's context manager - protocol which would be used in a ``with`` statement should be - registered with :meth:`with_resource` instead. - - :param f: The function to execute on teardown. - """ - return self._exit_stack.callback(f) - - def close(self) -> None: - """Invoke all close callbacks registered with - :meth:`call_on_close`, and exit all context managers entered - with :meth:`with_resource`. - """ - self._exit_stack.close() - # In case the context is reused, create a new exit stack. - self._exit_stack = ExitStack() - - @property - def command_path(self) -> str: - """The computed command path. This is used for the ``usage`` - information on the help page. It's automatically created by - combining the info names of the chain of contexts to the root. 
- """ - rv = "" - if self.info_name is not None: - rv = self.info_name - if self.parent is not None: - parent_command_path = [self.parent.command_path] - - if isinstance(self.parent.command, Command): - for param in self.parent.command.get_params(self): - parent_command_path.extend(param.get_usage_pieces(self)) - - rv = f"{' '.join(parent_command_path)} {rv}" - return rv.lstrip() - - def find_root(self) -> "Context": - """Finds the outermost context.""" - node = self - while node.parent is not None: - node = node.parent - return node - - def find_object(self, object_type: t.Type[V]) -> t.Optional[V]: - """Finds the closest object of a given type.""" - node: t.Optional["Context"] = self - - while node is not None: - if isinstance(node.obj, object_type): - return node.obj - - node = node.parent - - return None - - def ensure_object(self, object_type: t.Type[V]) -> V: - """Like :meth:`find_object` but sets the innermost object to a - new instance of `object_type` if it does not exist. - """ - rv = self.find_object(object_type) - if rv is None: - self.obj = rv = object_type() - return rv - - @t.overload - def lookup_default( - self, name: str, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def lookup_default( - self, name: str, call: "te.Literal[False]" = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def lookup_default(self, name: str, call: bool = True) -> t.Optional[t.Any]: - """Get the default for a parameter from :attr:`default_map`. - - :param name: Name of the parameter. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. - """ - if self.default_map is not None: - value = self.default_map.get(name) - - if call and callable(value): - return value() - - return value - - return None - - def fail(self, message: str) -> "te.NoReturn": - """Aborts the execution of the program with a specific error - message. - - :param message: the error message to fail with. - """ - raise UsageError(message, self) - - def abort(self) -> "te.NoReturn": - """Aborts the script.""" - raise Abort() - - def exit(self, code: int = 0) -> "te.NoReturn": - """Exits the application with a given exit code.""" - raise Exit(code) - - def get_usage(self) -> str: - """Helper method to get formatted usage string for the current - context and command. - """ - return self.command.get_usage(self) - - def get_help(self) -> str: - """Helper method to get formatted help page for the current - context and command. - """ - return self.command.get_help(self) - - def _make_sub_context(self, command: "Command") -> "Context": - """Create a new context of the same type as this context, but - for a new command. - - :meta private: - """ - return type(self)(command, info_name=command.name, parent=self) - - @t.overload - def invoke( - __self, # noqa: B902 - __callback: "t.Callable[..., V]", - *args: t.Any, - **kwargs: t.Any, - ) -> V: - ... - - @t.overload - def invoke( - __self, # noqa: B902 - __callback: "Command", - *args: t.Any, - **kwargs: t.Any, - ) -> t.Any: - ... - - def invoke( - __self, # noqa: B902 - __callback: t.Union["Command", "t.Callable[..., V]"], - *args: t.Any, - **kwargs: t.Any, - ) -> t.Union[t.Any, V]: - """Invokes a command callback in exactly the way it expects. There - are two ways to invoke this method: - - 1. the first argument can be a callback and all other arguments and - keyword arguments are forwarded directly to the function. - 2. 
the first argument is a click command object. In that case all - arguments are forwarded as well but proper click parameters - (options and click arguments) must be keyword arguments and Click - will fill in defaults. - - Note that before Click 3.2 keyword arguments were not properly filled - in against the intention of this code and no context was created. For - more information about this change and why it was done in a bugfix - release see :ref:`upgrade-to-3.2`. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if :meth:`forward` is called at multiple levels. - """ - if isinstance(__callback, Command): - other_cmd = __callback - - if other_cmd.callback is None: - raise TypeError( - "The given command does not have a callback that can be invoked." - ) - else: - __callback = t.cast("t.Callable[..., V]", other_cmd.callback) - - ctx = __self._make_sub_context(other_cmd) - - for param in other_cmd.params: - if param.name not in kwargs and param.expose_value: - kwargs[param.name] = param.type_cast_value( # type: ignore - ctx, param.get_default(ctx) - ) - - # Track all kwargs as params, so that forward() will pass - # them on in subsequent calls. - ctx.params.update(kwargs) - else: - ctx = __self - - with augment_usage_errors(__self): - with ctx: - return __callback(*args, **kwargs) - - def forward( - __self, __cmd: "Command", *args: t.Any, **kwargs: t.Any # noqa: B902 - ) -> t.Any: - """Similar to :meth:`invoke` but fills in default keyword - arguments from the current context if the other command expects - it. This cannot invoke callbacks directly, only other commands. - - .. versionchanged:: 8.0 - All ``kwargs`` are tracked in :attr:`params` so they will be - passed if ``forward`` is called at multiple levels. - """ - # Can only forward to other commands, not direct callbacks. - if not isinstance(__cmd, Command): - raise TypeError("Callback is not a command.") - - for param in __self.params: - if param not in kwargs: - kwargs[param] = __self.params[param] - - return __self.invoke(__cmd, *args, **kwargs) - - def set_parameter_source(self, name: str, source: ParameterSource) -> None: - """Set the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - :param name: The name of the parameter. - :param source: A member of :class:`~click.core.ParameterSource`. - """ - self._parameter_source[name] = source - - def get_parameter_source(self, name: str) -> t.Optional[ParameterSource]: - """Get the source of a parameter. This indicates the location - from which the value of the parameter was obtained. - - This can be useful for determining when a user specified a value - on the command line that is the same as the default value. It - will be :attr:`~click.core.ParameterSource.DEFAULT` only if the - value was actually taken from the default. - - :param name: The name of the parameter. - :rtype: ParameterSource - - .. versionchanged:: 8.0 - Returns ``None`` if the parameter was not provided from any - source. - """ - return self._parameter_source.get(name) - - -class BaseCommand: - """The base command implements the minimal API contract of commands. - Most code will never use this as it does not implement a lot of useful - functionality but it can act as the direct subclass of alternative - parsing methods that do not depend on the Click parser. - - For instance, this can be used to bridge Click and other systems like - argparse or docopt. 
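
# A minimal sketch of the invoke()/forward() distinction implemented above,
# patterned on the example in the click documentation.
import click

@click.command()
@click.option("--count", default=1)
def test(count):
    click.echo(count)

@click.command()
@click.option("--count", default=1)
@click.pass_context
def dist(ctx, count):
    ctx.forward(test)           # re-sends this command's own --count
    ctx.invoke(test, count=42)  # explicit kwargs; defaults fill the rest
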
- - Because base commands do not implement a lot of the API that other - parts of Click take for granted, they are not supported for all - operations. For instance, they cannot be used with the decorators - usually and they have no built-in callback system. - - .. versionchanged:: 2.0 - Added the `context_settings` parameter. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - """ - - #: The context class to create with :meth:`make_context`. - #: - #: .. versionadded:: 8.0 - context_class: t.Type[Context] = Context - #: the default for the :attr:`Context.allow_extra_args` flag. - allow_extra_args = False - #: the default for the :attr:`Context.allow_interspersed_args` flag. - allow_interspersed_args = True - #: the default for the :attr:`Context.ignore_unknown_options` flag. - ignore_unknown_options = False - - def __init__( - self, - name: t.Optional[str], - context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, - ) -> None: - #: the name the command thinks it has. Upon registering a command - #: on a :class:`Group` the group will default the command name - #: with this information. You should instead use the - #: :class:`Context`\'s :attr:`~Context.info_name` attribute. - self.name = name - - if context_settings is None: - context_settings = {} - - #: an optional dictionary with defaults passed to the context. - self.context_settings: t.MutableMapping[str, t.Any] = context_settings - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. This traverses the entire structure - below this command. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - :param ctx: A :class:`Context` representing this command. - - .. versionadded:: 8.0 - """ - return {"name": self.name} - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def get_usage(self, ctx: Context) -> str: - raise NotImplementedError("Base commands cannot get usage") - - def get_help(self, ctx: Context) -> str: - raise NotImplementedError("Base commands cannot get help") - - def make_context( - self, - info_name: t.Optional[str], - args: t.List[str], - parent: t.Optional[Context] = None, - **extra: t.Any, - ) -> Context: - """This function when given an info name and arguments will kick - off the parsing and create a new :class:`Context`. It does not - invoke the actual command callback though. - - To quickly customize the context class used without overriding - this method, set the :attr:`context_class` attribute. - - :param info_name: the info name for this invocation. Generally this - is the most descriptive name for the script or - command. For the toplevel script it's usually - the name of the script, for commands below it's - the name of the command. - :param args: the arguments to parse as list of strings. - :param parent: the parent context if available. - :param extra: extra keyword arguments forwarded to the context - constructor. - - .. versionchanged:: 8.0 - Added the :attr:`context_class` attribute. 
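# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# context_settings entries become Context constructor defaults, and
# make_context() parses args without invoking the callback, which is
# handy in tests. `show` is a hypothetical command name.
import click

@click.command(context_settings={"ignore_unknown_options": True})
@click.option("--name", default="world")
def show(name):
    click.echo(name)

ctx = show.make_context("show", ["--name", "click"])
assert ctx.params["name"] == "click"   # parsed, but show() was not called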
- """ - for key, value in self.context_settings.items(): - if key not in extra: - extra[key] = value - - ctx = self.context_class( - self, info_name=info_name, parent=parent, **extra # type: ignore - ) - - with ctx.scope(cleanup=False): - self.parse_args(ctx, args) - return ctx - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - """Given a context and a list of arguments this creates the parser - and parses the arguments, then modifies the context as necessary. - This is automatically invoked by :meth:`make_context`. - """ - raise NotImplementedError("Base commands do not know how to parse arguments.") - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the command. The default - implementation is raising a not implemented error. - """ - raise NotImplementedError("Base commands are not invocable by default") - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of chained multi-commands. - - Any command could be part of a chained multi-command, so sibling - commands are valid at any point during command completion. Other - command classes will return more completions. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: t.List["CompletionItem"] = [] - - while ctx.parent is not None: - ctx = ctx.parent - - if isinstance(ctx.command, MultiCommand) and ctx.command.chain: - results.extend( - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - if name not in ctx.protected_args - ) - - return results - - @t.overload - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: "te.Literal[True]" = True, - **extra: t.Any, - ) -> "te.NoReturn": - ... - - @t.overload - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: bool = ..., - **extra: t.Any, - ) -> t.Any: - ... - - def main( - self, - args: t.Optional[t.Sequence[str]] = None, - prog_name: t.Optional[str] = None, - complete_var: t.Optional[str] = None, - standalone_mode: bool = True, - windows_expand_args: bool = True, - **extra: t.Any, - ) -> t.Any: - """This is the way to invoke a script with all the bells and - whistles as a command line application. This will always terminate - the application after a call. If this is not wanted, ``SystemExit`` - needs to be caught. - - This method is also available by directly calling the instance of - a :class:`Command`. - - :param args: the arguments that should be used for parsing. If not - provided, ``sys.argv[1:]`` is used. - :param prog_name: the program name that should be used. By default - the program name is constructed by taking the file - name from ``sys.argv[0]``. - :param complete_var: the environment variable that controls the - bash completion support. The default is - ``"__COMPLETE"`` with prog_name in - uppercase. - :param standalone_mode: the default behavior is to invoke the script - in standalone mode. Click will then - handle exceptions and convert them into - error messages and the function will never - return but shut down the interpreter. 
If - this is set to `False` they will be - propagated to the caller and the return - value of this function is the return value - of :meth:`invoke`. - :param windows_expand_args: Expand glob patterns, user dir, and - env vars in command line args on Windows. - :param extra: extra keyword arguments are forwarded to the context - constructor. See :class:`Context` for more information. - - .. versionchanged:: 8.0.1 - Added the ``windows_expand_args`` parameter to allow - disabling command line arg expansion on Windows. - - .. versionchanged:: 8.0 - When taking arguments from ``sys.argv`` on Windows, glob - patterns, user dir, and env vars are expanded. - - .. versionchanged:: 3.0 - Added the ``standalone_mode`` parameter. - """ - if args is None: - args = sys.argv[1:] - - if os.name == "nt" and windows_expand_args: - args = _expand_args(args) - else: - args = list(args) - - if prog_name is None: - prog_name = _detect_program_name() - - # Process shell completion requests and exit early. - self._main_shell_completion(extra, prog_name, complete_var) - - try: - try: - with self.make_context(prog_name, args, **extra) as ctx: - rv = self.invoke(ctx) - if not standalone_mode: - return rv - # it's not safe to `ctx.exit(rv)` here! - # note that `rv` may actually contain data like "1" which - # has obvious effects - # more subtle case: `rv=[None, None]` can come out of - # chained commands which all returned `None` -- so it's not - # even always obvious that `rv` indicates success/failure - # by its truthiness/falsiness - ctx.exit() - except (EOFError, KeyboardInterrupt) as e: - echo(file=sys.stderr) - raise Abort() from e - except ClickException as e: - if not standalone_mode: - raise - e.show() - sys.exit(e.exit_code) - except OSError as e: - if e.errno == errno.EPIPE: - sys.stdout = t.cast(t.TextIO, PacifyFlushWrapper(sys.stdout)) - sys.stderr = t.cast(t.TextIO, PacifyFlushWrapper(sys.stderr)) - sys.exit(1) - else: - raise - except Exit as e: - if standalone_mode: - sys.exit(e.exit_code) - else: - # in non-standalone mode, return the exit code - # note that this is only reached if `self.invoke` above raises - # an Exit explicitly -- thus bypassing the check there which - # would return its result - # the results of non-standalone execution may therefore be - # somewhat ambiguous: if there are codepaths which lead to - # `ctx.exit(1)` and to `return 1`, the caller won't be able to - # tell the difference between the two - return e.exit_code - except Abort: - if not standalone_mode: - raise - echo(_("Aborted!"), file=sys.stderr) - sys.exit(1) - - def _main_shell_completion( - self, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - complete_var: t.Optional[str] = None, - ) -> None: - """Check if the shell is asking for tab completion, process - that, then exit early. Called from :meth:`main` before the - program is invoked. - - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. Defaults to - ``_{PROG_NAME}_COMPLETE``. - - .. versionchanged:: 8.2.0 - Dots (``.``) in ``prog_name`` are replaced with underscores (``_``). 
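# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# With standalone_mode=False, main() returns the callback's return value
# instead of handling exceptions and calling sys.exit(). `add` is a
# hypothetical command.
import click

@click.command()
@click.argument("x", type=int)
@click.argument("y", type=int)
def add(x, y):
    return x + y

result = add.main(["2", "3"], standalone_mode=False)
assert result == 5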
- """ - if complete_var is None: - complete_name = prog_name.replace("-", "_").replace(".", "_") - complete_var = f"_{complete_name}_COMPLETE".upper() - - instruction = os.environ.get(complete_var) - - if not instruction: - return - - from .shell_completion import shell_complete - - rv = shell_complete(self, ctx_args, prog_name, complete_var, instruction) - sys.exit(rv) - - def __call__(self, *args: t.Any, **kwargs: t.Any) -> t.Any: - """Alias for :meth:`main`.""" - return self.main(*args, **kwargs) - - -class Command(BaseCommand): - """Commands are the basic building block of command line interfaces in - Click. A basic command handles command line parsing and might dispatch - more parsing to commands nested below it. - - :param name: the name of the command to use unless a group overrides it. - :param context_settings: an optional dictionary with defaults that are - passed to the context object. - :param callback: the callback to invoke. This is optional. - :param params: the parameters to register with this command. This can - be either :class:`Option` or :class:`Argument` objects. - :param help: the help string to use for this command. - :param epilog: like the help string but it's printed at the end of the - help page after everything else. - :param short_help: the short help to use for this command. This is - shown on the command listing of the parent command. - :param add_help_option: by default each command registers a ``--help`` - option. This can be disabled by this parameter. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is disabled by default. - If enabled this will add ``--help`` as argument - if no arguments are passed - :param hidden: hide this command from help outputs. - - :param deprecated: issues a message indicating that - the command is deprecated. - - .. versionchanged:: 8.1 - ``help``, ``epilog``, and ``short_help`` are stored unprocessed, - all formatting is done when outputting help text, not at init, - and is done even if not using the ``@command`` decorator. - - .. versionchanged:: 8.0 - Added a ``repr`` showing the command name. - - .. versionchanged:: 7.1 - Added the ``no_args_is_help`` parameter. - - .. versionchanged:: 2.0 - Added the ``context_settings`` parameter. - """ - - def __init__( - self, - name: t.Optional[str], - context_settings: t.Optional[t.MutableMapping[str, t.Any]] = None, - callback: t.Optional[t.Callable[..., t.Any]] = None, - params: t.Optional[t.List["Parameter"]] = None, - help: t.Optional[str] = None, - epilog: t.Optional[str] = None, - short_help: t.Optional[str] = None, - options_metavar: t.Optional[str] = "[OPTIONS]", - add_help_option: bool = True, - no_args_is_help: bool = False, - hidden: bool = False, - deprecated: bool = False, - ) -> None: - super().__init__(name, context_settings) - #: the callback to execute when the command fires. This might be - #: `None` in which case nothing happens. - self.callback = callback - #: the list of parameters for this command in the order they - #: should show up in the help page and execute. Eager parameters - #: will automatically be handled before non eager ones. 
- self.params: t.List["Parameter"] = params or [] - self.help = help - self.epilog = epilog - self.options_metavar = options_metavar - self.short_help = short_help - self.add_help_option = add_help_option - self.no_args_is_help = no_args_is_help - self.hidden = hidden - self.deprecated = deprecated - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - info_dict.update( - params=[param.to_info_dict() for param in self.get_params(ctx)], - help=self.help, - epilog=self.epilog, - short_help=self.short_help, - hidden=self.hidden, - deprecated=self.deprecated, - ) - return info_dict - - def get_usage(self, ctx: Context) -> str: - """Formats the usage line into a string and returns it. - - Calls :meth:`format_usage` internally. - """ - formatter = ctx.make_formatter() - self.format_usage(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_params(self, ctx: Context) -> t.List["Parameter"]: - rv = self.params - help_option = self.get_help_option(ctx) - - if help_option is not None: - rv = [*rv, help_option] - - return rv - - def format_usage(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the usage line into the formatter. - - This is a low-level method called by :meth:`get_usage`. - """ - pieces = self.collect_usage_pieces(ctx) - formatter.write_usage(ctx.command_path, " ".join(pieces)) - - def collect_usage_pieces(self, ctx: Context) -> t.List[str]: - """Returns all the pieces that go into the usage line and returns - it as a list of strings. - """ - rv = [self.options_metavar] if self.options_metavar else [] - - for param in self.get_params(ctx): - rv.extend(param.get_usage_pieces(ctx)) - - return rv - - def get_help_option_names(self, ctx: Context) -> t.List[str]: - """Returns the names for the help option.""" - all_names = set(ctx.help_option_names) - for param in self.params: - all_names.difference_update(param.opts) - all_names.difference_update(param.secondary_opts) - return list(all_names) - - def get_help_option(self, ctx: Context) -> t.Optional["Option"]: - """Returns the help option object.""" - help_options = self.get_help_option_names(ctx) - - if not help_options or not self.add_help_option: - return None - - def show_help(ctx: Context, param: "Parameter", value: str) -> None: - if value and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - return Option( - help_options, - is_flag=True, - is_eager=True, - expose_value=False, - callback=show_help, - help=_("Show this message and exit."), - ) - - def make_parser(self, ctx: Context) -> OptionParser: - """Creates the underlying option parser for this command.""" - parser = OptionParser(ctx) - for param in self.get_params(ctx): - param.add_to_parser(parser, ctx) - return parser - - def get_help(self, ctx: Context) -> str: - """Formats the help into a string and returns it. - - Calls :meth:`format_help` internally. - """ - formatter = ctx.make_formatter() - self.format_help(ctx, formatter) - return formatter.getvalue().rstrip("\n") - - def get_short_help_str(self, limit: int = 45) -> str: - """Gets short help for the command or makes it by shortening the - long help string. 
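# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# get_usage()/get_help() render through a Context; the short help is
# either short_help or a truncated long help. `sync` is hypothetical.
import click

@click.command(short_help="Sync two folders.")
@click.argument("src")
@click.argument("dst")
def sync(src, dst):
    """Synchronize SRC into DST, copying newer files only."""

ctx = click.Context(sync, info_name="sync")
print(sync.get_usage(ctx))          # Usage: sync [OPTIONS] SRC DST
print(sync.get_short_help_str())    # Sync two folders.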
- """ - if self.short_help: - text = inspect.cleandoc(self.short_help) - elif self.help: - text = make_default_short_help(self.help, limit) - else: - text = "" - - if self.deprecated: - text = _("(Deprecated) {text}").format(text=text) - - return text.strip() - - def format_help(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help into the formatter if it exists. - - This is a low-level method called by :meth:`get_help`. - - This calls the following methods: - - - :meth:`format_usage` - - :meth:`format_help_text` - - :meth:`format_options` - - :meth:`format_epilog` - """ - self.format_usage(ctx, formatter) - self.format_help_text(ctx, formatter) - self.format_options(ctx, formatter) - self.format_epilog(ctx, formatter) - - def format_help_text(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the help text to the formatter if it exists.""" - if self.help is not None: - # truncate the help text to the first form feed - text = inspect.cleandoc(self.help).partition("\f")[0] - else: - text = "" - - if self.deprecated: - text = _("(Deprecated) {text}").format(text=text) - - if text: - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(text) - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes all the options into the formatter if they exist.""" - opts = [] - for param in self.get_params(ctx): - rv = param.get_help_record(ctx) - if rv is not None: - opts.append(rv) - - if opts: - with formatter.section(_("Options")): - formatter.write_dl(opts) - - def format_epilog(self, ctx: Context, formatter: HelpFormatter) -> None: - """Writes the epilog into the formatter if it exists.""" - if self.epilog: - epilog = inspect.cleandoc(self.epilog) - formatter.write_paragraph() - - with formatter.indentation(): - formatter.write_text(epilog) - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - parser = self.make_parser(ctx) - opts, args, param_order = parser.parse_args(args=args) - - for param in iter_params_for_processing(param_order, self.get_params(ctx)): - value, args = param.handle_parse_result(ctx, opts, args) - - if args and not ctx.allow_extra_args and not ctx.resilient_parsing: - ctx.fail( - ngettext( - "Got unexpected extra argument ({args})", - "Got unexpected extra arguments ({args})", - len(args), - ).format(args=" ".join(map(str, args))) - ) - - ctx.args = args - ctx._opt_prefixes.update(parser._opt_prefixes) - return args - - def invoke(self, ctx: Context) -> t.Any: - """Given a context, this invokes the attached callback (if it exists) - in the right way. - """ - if self.deprecated: - message = _( - "DeprecationWarning: The command {name!r} is deprecated." - ).format(name=self.name) - echo(style(message, fg="red"), err=True) - - if self.callback is not None: - return ctx.invoke(self.callback, **ctx.params) - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of options and chained multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. 
versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results: t.List["CompletionItem"] = [] - - if incomplete and not incomplete[0].isalnum(): - for param in self.get_params(ctx): - if ( - not isinstance(param, Option) - or param.hidden - or ( - not param.multiple - and ctx.get_parameter_source(param.name) # type: ignore - is ParameterSource.COMMANDLINE - ) - ): - continue - - results.extend( - CompletionItem(name, help=param.help) - for name in [*param.opts, *param.secondary_opts] - if name.startswith(incomplete) - ) - - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class MultiCommand(Command): - """A multi command is the basic implementation of a command that - dispatches to subcommands. The most common version is the - :class:`Group`. - - :param invoke_without_command: this controls how the multi command itself - is invoked. By default it's only invoked - if a subcommand is provided. - :param no_args_is_help: this controls what happens if no arguments are - provided. This option is enabled by default if - `invoke_without_command` is disabled or disabled - if it's enabled. If enabled this will add - ``--help`` as argument if no arguments are - passed. - :param subcommand_metavar: the string that is used in the documentation - to indicate the subcommand place. - :param chain: if this is set to `True` chaining of multiple subcommands - is enabled. This restricts the form of commands in that - they cannot have optional arguments but it allows - multiple commands to be chained together. - :param result_callback: The result callback to attach to this multi - command. This can be set or changed later with the - :meth:`result_callback` decorator. - :param attrs: Other command arguments described in :class:`Command`. - """ - - allow_extra_args = True - allow_interspersed_args = False - - def __init__( - self, - name: t.Optional[str] = None, - invoke_without_command: bool = False, - no_args_is_help: t.Optional[bool] = None, - subcommand_metavar: t.Optional[str] = None, - chain: bool = False, - result_callback: t.Optional[t.Callable[..., t.Any]] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - - if no_args_is_help is None: - no_args_is_help = not invoke_without_command - - self.no_args_is_help = no_args_is_help - self.invoke_without_command = invoke_without_command - - if subcommand_metavar is None: - if chain: - subcommand_metavar = "COMMAND1 [ARGS]... [COMMAND2 [ARGS]...]..." - else: - subcommand_metavar = "COMMAND [ARGS]..." - - self.subcommand_metavar = subcommand_metavar - self.chain = chain - # The result callback that is stored. This can be set or - # overridden with the :func:`result_callback` decorator. - self._result_callback = result_callback - - if self.chain: - for param in self.params: - if isinstance(param, Argument) and not param.required: - raise RuntimeError( - "Multi commands in chain mode cannot have" - " optional arguments." 
- ) - - def to_info_dict(self, ctx: Context) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict(ctx) - commands = {} - - for name in self.list_commands(ctx): - command = self.get_command(ctx, name) - - if command is None: - continue - - sub_ctx = ctx._make_sub_context(command) - - with sub_ctx.scope(cleanup=False): - commands[name] = command.to_info_dict(sub_ctx) - - info_dict.update(commands=commands, chain=self.chain) - return info_dict - - def collect_usage_pieces(self, ctx: Context) -> t.List[str]: - rv = super().collect_usage_pieces(ctx) - rv.append(self.subcommand_metavar) - return rv - - def format_options(self, ctx: Context, formatter: HelpFormatter) -> None: - super().format_options(ctx, formatter) - self.format_commands(ctx, formatter) - - def result_callback(self, replace: bool = False) -> t.Callable[[F], F]: - """Adds a result callback to the command. By default if a - result callback is already registered this will chain them but - this can be disabled with the `replace` parameter. The result - callback is invoked with the return value of the subcommand - (or the list of return values from all subcommands if chaining - is enabled) as well as the parameters as they would be passed - to the main callback. - - Example:: - - @click.group() - @click.option('-i', '--input', default=23) - def cli(input): - return 42 - - @cli.result_callback() - def process_result(result, input): - return result + input - - :param replace: if set to `True` an already existing result - callback will be removed. - - .. versionchanged:: 8.0 - Renamed from ``resultcallback``. - - .. versionadded:: 3.0 - """ - - def decorator(f: F) -> F: - old_callback = self._result_callback - - if old_callback is None or replace: - self._result_callback = f - return f - - def function(__value, *args, **kwargs): # type: ignore - inner = old_callback(__value, *args, **kwargs) - return f(inner, *args, **kwargs) - - self._result_callback = rv = update_wrapper(t.cast(F, function), f) - return rv - - return decorator - - def format_commands(self, ctx: Context, formatter: HelpFormatter) -> None: - """Extra format methods for multi methods that adds all the commands - after the options. - """ - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.get_command(ctx, subcommand) - # What is this, the tool lied about a command. 
Ignore it - if cmd is None: - continue - if cmd.hidden: - continue - - commands.append((subcommand, cmd)) - - # allow for 3 times the default spacing - if len(commands): - limit = formatter.width - 6 - max(len(cmd[0]) for cmd in commands) - - rows = [] - for subcommand, cmd in commands: - help = cmd.get_short_help_str(limit) - rows.append((subcommand, help)) - - if rows: - with formatter.section(_("Commands")): - formatter.write_dl(rows) - - def parse_args(self, ctx: Context, args: t.List[str]) -> t.List[str]: - if not args and self.no_args_is_help and not ctx.resilient_parsing: - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - rest = super().parse_args(ctx, args) - - if self.chain: - ctx.protected_args = rest - ctx.args = [] - elif rest: - ctx.protected_args, ctx.args = rest[:1], rest[1:] - - return ctx.args - - def invoke(self, ctx: Context) -> t.Any: - def _process_result(value: t.Any) -> t.Any: - if self._result_callback is not None: - value = ctx.invoke(self._result_callback, value, **ctx.params) - return value - - if not ctx.protected_args: - if self.invoke_without_command: - # No subcommand was invoked, so the result callback is - # invoked with the group return value for regular - # groups, or an empty list for chained groups. - with ctx: - rv = super().invoke(ctx) - return _process_result([] if self.chain else rv) - ctx.fail(_("Missing command.")) - - # Fetch args back out - args = [*ctx.protected_args, *ctx.args] - ctx.args = [] - ctx.protected_args = [] - - # If we're not in chain mode, we only allow the invocation of a - # single command but we also inform the current context about the - # name of the command to invoke. - if not self.chain: - # Make sure the context is entered so we do not clean up - # resources until the result processor has worked. - with ctx: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - ctx.invoked_subcommand = cmd_name - super().invoke(ctx) - sub_ctx = cmd.make_context(cmd_name, args, parent=ctx) - with sub_ctx: - return _process_result(sub_ctx.command.invoke(sub_ctx)) - - # In chain mode we create the contexts step by step, but after the - # base command has been invoked. Because at that point we do not - # know the subcommands yet, the invoked subcommand attribute is - # set to ``*`` to inform the command that subcommands are executed - # but nothing else. - with ctx: - ctx.invoked_subcommand = "*" if args else None - super().invoke(ctx) - - # Otherwise we make every single context and invoke them in a - # chain. In that case the return value to the result processor - # is the list of all invoked subcommand's results. - contexts = [] - while args: - cmd_name, cmd, args = self.resolve_command(ctx, args) - assert cmd is not None - sub_ctx = cmd.make_context( - cmd_name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - ) - contexts.append(sub_ctx) - args, sub_ctx.args = sub_ctx.args, [] - - rv = [] - for sub_ctx in contexts: - with sub_ctx: - rv.append(sub_ctx.command.invoke(sub_ctx)) - return _process_result(rv) - - def resolve_command( - self, ctx: Context, args: t.List[str] - ) -> t.Tuple[t.Optional[str], t.Optional[Command], t.List[str]]: - cmd_name = make_str(args[0]) - original_cmd_name = cmd_name - - # Get the command - cmd = self.get_command(ctx, cmd_name) - - # If we can't find the command but there is a normalization - # function available, we try with that one. 
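# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# With chain=True, several subcommands run in one invocation and the
# result callback receives the list of their return values.
# `pipeline`, `lint`, and `test` are hypothetical names.
import click

@click.group(chain=True)
def pipeline():
    pass

@pipeline.command()
def lint():
    return "lint-ok"

@pipeline.command()
def test():
    return "test-ok"

@pipeline.result_callback()
def summarize(results):
    click.echo(f"ran {len(results)} steps")

# Invoked as: pipeline lint test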
- if cmd is None and ctx.token_normalize_func is not None: - cmd_name = ctx.token_normalize_func(cmd_name) - cmd = self.get_command(ctx, cmd_name) - - # If we don't find the command we want to show an error message - # to the user that it was not provided. However, there is - # something else we should do: if the first argument looks like - # an option we want to kick off parsing again for arguments to - # resolve things like --help which now should go to the main - # place. - if cmd is None and not ctx.resilient_parsing: - if split_opt(cmd_name)[0]: - self.parse_args(ctx, ctx.args) - ctx.fail(_("No such command {name!r}.").format(name=original_cmd_name)) - return cmd_name if cmd else None, cmd, args[1:] - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - """Given a context and a command name, this returns a - :class:`Command` object if it exists or returns `None`. - """ - raise NotImplementedError - - def list_commands(self, ctx: Context) -> t.List[str]: - """Returns a list of subcommand names in the order they should - appear. - """ - return [] - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. Looks - at the names of options, subcommands, and chained - multi-commands. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - results = [ - CompletionItem(name, help=command.get_short_help_str()) - for name, command in _complete_visible_commands(ctx, incomplete) - ] - results.extend(super().shell_complete(ctx, incomplete)) - return results - - -class Group(MultiCommand): - """A group allows a command to have subcommands attached. This is - the most common way to implement nesting in Click. - - :param name: The name of the group command. - :param commands: A dict mapping names to :class:`Command` objects. - Can also be a list of :class:`Command`, which will use - :attr:`Command.name` to create the dict. - :param attrs: Other command arguments described in - :class:`MultiCommand`, :class:`Command`, and - :class:`BaseCommand`. - - .. versionchanged:: 8.0 - The ``commands`` argument can be a list of command objects. - """ - - #: If set, this is used by the group's :meth:`command` decorator - #: as the default :class:`Command` class. This is useful to make all - #: subcommands use a custom command class. - #: - #: .. versionadded:: 8.0 - command_class: t.Optional[t.Type[Command]] = None - - #: If set, this is used by the group's :meth:`group` decorator - #: as the default :class:`Group` class. This is useful to make all - #: subgroups use a custom group class. - #: - #: If set to the special value :class:`type` (literally - #: ``group_class = type``), this group's class will be used as the - #: default class. This makes a custom group class continue to make - #: custom groups. - #: - #: .. 
versionadded:: 8.0 - group_class: t.Optional[t.Union[t.Type["Group"], t.Type[type]]] = None - # Literal[type] isn't valid, so use Type[type] - - def __init__( - self, - name: t.Optional[str] = None, - commands: t.Optional[ - t.Union[t.MutableMapping[str, Command], t.Sequence[Command]] - ] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - - if commands is None: - commands = {} - elif isinstance(commands, abc.Sequence): - commands = {c.name: c for c in commands if c.name is not None} - - #: The registered subcommands by their exported names. - self.commands: t.MutableMapping[str, Command] = commands - - def add_command(self, cmd: Command, name: t.Optional[str] = None) -> None: - """Registers another :class:`Command` with this group. If the name - is not provided, the name of the command is used. - """ - name = name or cmd.name - if name is None: - raise TypeError("Command has no name.") - _check_multicommand(self, name, cmd, register=True) - self.commands[name] = cmd - - @t.overload - def command(self, __func: t.Callable[..., t.Any]) -> Command: - ... - - @t.overload - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], Command]: - ... - - def command( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], Command], Command]: - """A shortcut decorator for declaring and attaching a command to - the group. This takes the same arguments as :func:`command` and - immediately registers the created command with this group by - calling :meth:`add_command`. - - To customize the command class used, set the - :attr:`command_class` attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`command_class` attribute. - """ - from .decorators import command - - func: t.Optional[t.Callable[..., t.Any]] = None - - if args and callable(args[0]): - assert ( - len(args) == 1 and not kwargs - ), "Use 'command(**kwargs)(callable)' to provide arguments." - (func,) = args - args = () - - if self.command_class and kwargs.get("cls") is None: - kwargs["cls"] = self.command_class - - def decorator(f: t.Callable[..., t.Any]) -> Command: - cmd: Command = command(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - @t.overload - def group(self, __func: t.Callable[..., t.Any]) -> "Group": - ... - - @t.overload - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], "Group"]: - ... - - def group( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Union[t.Callable[[t.Callable[..., t.Any]], "Group"], "Group"]: - """A shortcut decorator for declaring and attaching a group to - the group. This takes the same arguments as :func:`group` and - immediately registers the created group with this group by - calling :meth:`add_command`. - - To customize the group class used, set the :attr:`group_class` - attribute. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.0 - Added the :attr:`group_class` attribute. - """ - from .decorators import group - - func: t.Optional[t.Callable[..., t.Any]] = None - - if args and callable(args[0]): - assert ( - len(args) == 1 and not kwargs - ), "Use 'group(**kwargs)(callable)' to provide arguments." 
- (func,) = args - args = () - - if self.group_class is not None and kwargs.get("cls") is None: - if self.group_class is type: - kwargs["cls"] = type(self) - else: - kwargs["cls"] = self.group_class - - def decorator(f: t.Callable[..., t.Any]) -> "Group": - cmd: Group = group(*args, **kwargs)(f) - self.add_command(cmd) - return cmd - - if func is not None: - return decorator(func) - - return decorator - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - return self.commands.get(cmd_name) - - def list_commands(self, ctx: Context) -> t.List[str]: - return sorted(self.commands) - - -class CommandCollection(MultiCommand): - """A command collection is a multi command that merges multiple multi - commands together into one. This is a straightforward implementation - that accepts a list of different multi commands as sources and - provides all the commands for each of them. - - See :class:`MultiCommand` and :class:`Command` for the description of - ``name`` and ``attrs``. - """ - - def __init__( - self, - name: t.Optional[str] = None, - sources: t.Optional[t.List[MultiCommand]] = None, - **attrs: t.Any, - ) -> None: - super().__init__(name, **attrs) - #: The list of registered multi commands. - self.sources: t.List[MultiCommand] = sources or [] - - def add_source(self, multi_cmd: MultiCommand) -> None: - """Adds a new multi command to the chain dispatcher.""" - self.sources.append(multi_cmd) - - def get_command(self, ctx: Context, cmd_name: str) -> t.Optional[Command]: - for source in self.sources: - rv = source.get_command(ctx, cmd_name) - - if rv is not None: - if self.chain: - _check_multicommand(self, cmd_name, rv) - - return rv - - return None - - def list_commands(self, ctx: Context) -> t.List[str]: - rv: t.Set[str] = set() - - for source in self.sources: - rv.update(source.list_commands(ctx)) - - return sorted(rv) - - -def _check_iter(value: t.Any) -> t.Iterator[t.Any]: - """Check if the value is iterable but not a string. Raises a type - error, or return an iterator over the value. - """ - if isinstance(value, str): - raise TypeError - - return iter(value) - - -class Parameter: - r"""A parameter to a command comes in two versions: they are either - :class:`Option`\s or :class:`Argument`\s. Other subclasses are currently - not supported by design as some of the internals for parsing are - intentionally not finalized. - - Some settings are supported by both options and arguments. - - :param param_decls: the parameter declarations for this option or - argument. This is a list of flags or argument - names. - :param type: the type that should be used. Either a :class:`ParamType` - or a Python type. The latter is converted into the former - automatically if supported. - :param required: controls if this is optional or not. - :param default: the default value if omitted. This can also be a callable, - in which case it's invoked when the default is needed - without any arguments. - :param callback: A function to further process or validate the value - after type conversion. It is called as ``f(ctx, param, value)`` - and must return the value. It is called for all sources, - including prompts. - :param nargs: the number of arguments to match. If not ``1`` the return - value is a tuple instead of single value. The default for - nargs is ``1`` (except if the type is a tuple, then it's - the arity of the tuple). If ``nargs=-1``, all remaining - parameters are collected. - :param metavar: how the value is represented in the help page. 
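# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# A CommandCollection merges several groups into one CLI; the first
# source that knows a command name wins. Names here are hypothetical.
import click

@click.group()
def tools():
    pass

@tools.command()
def fmt():
    click.echo("formatting")

@click.group()
def extras():
    pass

@extras.command()
def stats():
    click.echo("stats")

cli = click.CommandCollection(sources=[tools, extras])
# `cli fmt` and `cli stats` both resolve through the collection.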
- :param expose_value: if this is `True` then the value is passed onwards - to the command callback and stored on the context, - otherwise it's skipped. - :param is_eager: eager values are processed before non eager ones. This - should not be set for arguments or it will inverse the - order of processing. - :param envvar: a string or list of strings that are environment variables - that should be checked. - :param shell_complete: A function that returns custom shell - completions. Used instead of the param's type completion if - given. Takes ``ctx, param, incomplete`` and must return a list - of :class:`~click.shell_completion.CompletionItem` or a list of - strings. - - .. versionchanged:: 8.0 - ``process_value`` validates required parameters and bounded - ``nargs``, and invokes the parameter callback before returning - the value. This allows the callback to validate prompts. - ``full_process_value`` is removed. - - .. versionchanged:: 8.0 - ``autocompletion`` is renamed to ``shell_complete`` and has new - semantics described above. The old name is deprecated and will - be removed in 8.1, until then it will be wrapped to match the - new requirements. - - .. versionchanged:: 8.0 - For ``multiple=True, nargs>1``, the default must be a list of - tuples. - - .. versionchanged:: 8.0 - Setting a default is no longer required for ``nargs>1``, it will - default to ``None``. ``multiple=True`` or ``nargs=-1`` will - default to ``()``. - - .. versionchanged:: 7.1 - Empty environment variables are ignored rather than taking the - empty string value. This makes it possible for scripts to clear - variables if they can't unset them. - - .. versionchanged:: 2.0 - Changed signature for parameter callback to also be passed the - parameter. The old callback format will still work, but it will - raise a warning to give you a chance to migrate the code easier. - """ - - param_type_name = "parameter" - - def __init__( - self, - param_decls: t.Optional[t.Sequence[str]] = None, - type: t.Optional[t.Union[types.ParamType, t.Any]] = None, - required: bool = False, - default: t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]] = None, - callback: t.Optional[t.Callable[[Context, "Parameter", t.Any], t.Any]] = None, - nargs: t.Optional[int] = None, - multiple: bool = False, - metavar: t.Optional[str] = None, - expose_value: bool = True, - is_eager: bool = False, - envvar: t.Optional[t.Union[str, t.Sequence[str]]] = None, - shell_complete: t.Optional[ - t.Callable[ - [Context, "Parameter", str], - t.Union[t.List["CompletionItem"], t.List[str]], - ] - ] = None, - ) -> None: - self.name: t.Optional[str] - self.opts: t.List[str] - self.secondary_opts: t.List[str] - self.name, self.opts, self.secondary_opts = self._parse_decls( - param_decls or (), expose_value - ) - self.type: types.ParamType = types.convert_type(type, default) - - # Default nargs to what the type tells us if we have that - # information available. - if nargs is None: - if self.type.is_composite: - nargs = self.type.arity - else: - nargs = 1 - - self.required = required - self.callback = callback - self.nargs = nargs - self.multiple = multiple - self.expose_value = expose_value - self.default = default - self.is_eager = is_eager - self.metavar = metavar - self.envvar = envvar - self._custom_shell_complete = shell_complete - - if __debug__: - if self.type.is_composite and nargs != self.type.arity: - raise ValueError( - f"'nargs' must be {self.type.arity} (or None) for" - f" type {self.type!r}, but it was {nargs}." 
- ) - - # Skip no default or callable default. - check_default = default if not callable(default) else None - - if check_default is not None: - if multiple: - try: - # Only check the first value against nargs. - check_default = next(_check_iter(check_default), None) - except TypeError: - raise ValueError( - "'default' must be a list when 'multiple' is true." - ) from None - - # Can be None for multiple with empty default. - if nargs != 1 and check_default is not None: - try: - _check_iter(check_default) - except TypeError: - if multiple: - message = ( - "'default' must be a list of lists when 'multiple' is" - " true and 'nargs' != 1." - ) - else: - message = "'default' must be a list when 'nargs' != 1." - - raise ValueError(message) from None - - if nargs > 1 and len(check_default) != nargs: - subject = "item length" if multiple else "length" - raise ValueError( - f"'default' {subject} must match nargs={nargs}." - ) - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - return { - "name": self.name, - "param_type_name": self.param_type_name, - "opts": self.opts, - "secondary_opts": self.secondary_opts, - "type": self.type.to_info_dict(), - "required": self.required, - "nargs": self.nargs, - "multiple": self.multiple, - "default": self.default, - "envvar": self.envvar, - } - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} {self.name}>" - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - raise NotImplementedError() - - @property - def human_readable_name(self) -> str: - """Returns the human readable name of this parameter. This is the - same as the name for options, but the metavar for arguments. - """ - return self.name # type: ignore - - def make_metavar(self) -> str: - if self.metavar is not None: - return self.metavar - - metavar = self.type.get_metavar(self) - - if metavar is None: - metavar = self.type.name.upper() - - if self.nargs != 1: - metavar += "..." - - return metavar - - @t.overload - def get_default( - self, ctx: Context, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - """Get the default for the parameter. Tries - :meth:`Context.lookup_default` first, then the local default. - - :param ctx: Current context. - :param call: If the default is a callable, call it. Disable to - return the callable instead. - - .. versionchanged:: 8.0.2 - Type casting is no longer performed when getting a default. - - .. versionchanged:: 8.0.1 - Type casting can fail in resilient parsing mode. Invalid - defaults will not prevent showing help text. - - .. versionchanged:: 8.0 - Looks at ``ctx.default_map`` first. - - .. versionchanged:: 8.0 - Added the ``call`` parameter. 
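# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# get_default() consults ctx.default_map before the parameter's own
# default, and callable defaults are invoked lazily (help output shows
# "(dynamic)" for them). `run` is a hypothetical command.
import click

@click.command()
@click.option("--workers", default=lambda: 4, show_default=True)
def run(workers):
    click.echo(workers)

# A default_map (e.g. loaded from a config file) overrides the local default:
run.main([], default_map={"workers": 8}, standalone_mode=False)  # echoes 8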
- """ - value = ctx.lookup_default(self.name, call=False) # type: ignore - - if value is None: - value = self.default - - if call and callable(value): - value = value() - - return value - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - raise NotImplementedError() - - def consume_value( - self, ctx: Context, opts: t.Mapping[str, t.Any] - ) -> t.Tuple[t.Any, ParameterSource]: - value = opts.get(self.name) # type: ignore - source = ParameterSource.COMMANDLINE - - if value is None: - value = self.value_from_envvar(ctx) - source = ParameterSource.ENVIRONMENT - - if value is None: - value = ctx.lookup_default(self.name) # type: ignore - source = ParameterSource.DEFAULT_MAP - - if value is None: - value = self.get_default(ctx) - source = ParameterSource.DEFAULT - - return value, source - - def type_cast_value(self, ctx: Context, value: t.Any) -> t.Any: - """Convert and validate a value against the option's - :attr:`type`, :attr:`multiple`, and :attr:`nargs`. - """ - if value is None: - return () if self.multiple or self.nargs == -1 else None - - def check_iter(value: t.Any) -> t.Iterator[t.Any]: - try: - return _check_iter(value) - except TypeError: - # This should only happen when passing in args manually, - # the parser should construct an iterable when parsing - # the command line. - raise BadParameter( - _("Value must be an iterable."), ctx=ctx, param=self - ) from None - - if self.nargs == 1 or self.type.is_composite: - - def convert(value: t.Any) -> t.Any: - return self.type(value, param=self, ctx=ctx) - - elif self.nargs == -1: - - def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] - return tuple(self.type(x, self, ctx) for x in check_iter(value)) - - else: # nargs > 1 - - def convert(value: t.Any) -> t.Any: # t.Tuple[t.Any, ...] 
- value = tuple(check_iter(value)) - - if len(value) != self.nargs: - raise BadParameter( - ngettext( - "Takes {nargs} values but 1 was given.", - "Takes {nargs} values but {len} were given.", - len(value), - ).format(nargs=self.nargs, len=len(value)), - ctx=ctx, - param=self, - ) - - return tuple(self.type(x, self, ctx) for x in value) - - if self.multiple: - return tuple(convert(x) for x in check_iter(value)) - - return convert(value) - - def value_is_missing(self, value: t.Any) -> bool: - if value is None: - return True - - if (self.nargs != 1 or self.multiple) and value == (): - return True - - return False - - def process_value(self, ctx: Context, value: t.Any) -> t.Any: - value = self.type_cast_value(ctx, value) - - if self.required and self.value_is_missing(value): - raise MissingParameter(ctx=ctx, param=self) - - if self.callback is not None: - value = self.callback(ctx, self, value) - - return value - - def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: - if self.envvar is None: - return None - - if isinstance(self.envvar, str): - rv = os.environ.get(self.envvar) - - if rv: - return rv - else: - for envvar in self.envvar: - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: - rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) - - if rv is not None and self.nargs != 1: - rv = self.type.split_envvar_value(rv) - - return rv - - def handle_parse_result( - self, ctx: Context, opts: t.Mapping[str, t.Any], args: t.List[str] - ) -> t.Tuple[t.Any, t.List[str]]: - with augment_usage_errors(ctx, param=self): - value, source = self.consume_value(ctx, opts) - ctx.set_parameter_source(self.name, source) # type: ignore - - try: - value = self.process_value(ctx, value) - except Exception: - if not ctx.resilient_parsing: - raise - - value = None - - if self.expose_value: - ctx.params[self.name] = value # type: ignore - - return value, args - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - pass - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [] - - def get_error_hint(self, ctx: Context) -> str: - """Get a stringified version of the param for use in error messages to - indicate which param caused the error. - """ - hint_list = self.opts or [self.human_readable_name] - return " / ".join(f"'{x}'" for x in hint_list) - - def shell_complete(self, ctx: Context, incomplete: str) -> t.List["CompletionItem"]: - """Return a list of completions for the incomplete value. If a - ``shell_complete`` function was given during init, it is used. - Otherwise, the :attr:`type` - :meth:`~click.types.ParamType.shell_complete` function is used. - - :param ctx: Invocation context for this command. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - if self._custom_shell_complete is not None: - results = self._custom_shell_complete(ctx, self, incomplete) - - if results and isinstance(results[0], str): - from click.shell_completion import CompletionItem - - results = [CompletionItem(c) for c in results] - - return t.cast(t.List["CompletionItem"], results) - - return self.type.shell_complete(ctx, self, incomplete) - - -class Option(Parameter): - """Options are usually optional values on the command line and - have some extra features that arguments don't have. - - All other parameters are passed onwards to the parameter constructor. - - :param show_default: Show the default value for this option in its - help text. 
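# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# nargs groups values per occurrence and multiple collects occurrences,
# so the callback sees a tuple of tuples. `plot` is hypothetical.
import click

@click.command()
@click.option("--point", nargs=2, type=float, multiple=True)
def plot(point):
    click.echo(point)

# plot --point 1 2 --point 3 4  ->  ((1.0, 2.0), (3.0, 4.0))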
Values are not shown by default, unless - :attr:`Context.show_default` is ``True``. If this value is a - string, it shows that string in parentheses instead of the - actual value. This is particularly useful for dynamic options. - For single option boolean flags, the default remains hidden if - its value is ``False``. - :param show_envvar: Controls if an environment variable should be - shown on the help page. Normally, environment variables are not - shown. - :param prompt: If set to ``True`` or a non empty string then the - user will be prompted for input. If set to ``True`` the prompt - will be the option name capitalized. - :param confirmation_prompt: Prompt a second time to confirm the - value if it was prompted for. Can be set to a string instead of - ``True`` to customize the message. - :param prompt_required: If set to ``False``, the user will be - prompted for input only when the option was specified as a flag - without a value. - :param hide_input: If this is ``True`` then the input on the prompt - will be hidden from the user. This is useful for password input. - :param is_flag: forces this option to act as a flag. The default is - auto detection. - :param flag_value: which value should be used for this flag if it's - enabled. This is set to a boolean automatically if - the option string contains a slash to mark two options. - :param multiple: if this is set to `True` then the argument is accepted - multiple times and recorded. This is similar to ``nargs`` - in how it works but supports arbitrary number of - arguments. - :param count: this flag makes an option increment an integer. - :param allow_from_autoenv: if this is enabled then the value of this - parameter will be pulled from an environment - variable in case a prefix is defined on the - context. - :param help: the help string. - :param hidden: hide this option from help outputs. - :param attrs: Other command arguments described in :class:`Parameter`. - - .. versionchanged:: 8.1.0 - Help text indentation is cleaned here instead of only in the - ``@option`` decorator. - - .. versionchanged:: 8.1.0 - The ``show_default`` parameter overrides - ``Context.show_default``. - - .. versionchanged:: 8.1.0 - The default of a single option boolean flag is not shown if the - default value is ``False``. - - .. versionchanged:: 8.0.1 - ``type`` is detected from ``flag_value`` if given. 
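# --- Editor's note: illustrative sketch, not part of the deleted file. ---
# A password-style option combining the prompt-related settings described
# above; `login` is a hypothetical command.
import click

@click.command()
@click.option(
    "--password",
    prompt=True,                # ask if not given on the command line
    hide_input=True,            # do not echo what the user types
    confirmation_prompt=True,   # ask twice and compare
)
def login(password):
    click.echo("credentials received")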
- """ - - param_type_name = "option" - - def __init__( - self, - param_decls: t.Optional[t.Sequence[str]] = None, - show_default: t.Union[bool, str, None] = None, - prompt: t.Union[bool, str] = False, - confirmation_prompt: t.Union[bool, str] = False, - prompt_required: bool = True, - hide_input: bool = False, - is_flag: t.Optional[bool] = None, - flag_value: t.Optional[t.Any] = None, - multiple: bool = False, - count: bool = False, - allow_from_autoenv: bool = True, - type: t.Optional[t.Union[types.ParamType, t.Any]] = None, - help: t.Optional[str] = None, - hidden: bool = False, - show_choices: bool = True, - show_envvar: bool = False, - **attrs: t.Any, - ) -> None: - if help: - help = inspect.cleandoc(help) - - default_is_missing = "default" not in attrs - super().__init__(param_decls, type=type, multiple=multiple, **attrs) - - if prompt is True: - if self.name is None: - raise TypeError("'name' is required with 'prompt=True'.") - - prompt_text: t.Optional[str] = self.name.replace("_", " ").capitalize() - elif prompt is False: - prompt_text = None - else: - prompt_text = prompt - - self.prompt = prompt_text - self.confirmation_prompt = confirmation_prompt - self.prompt_required = prompt_required - self.hide_input = hide_input - self.hidden = hidden - - # If prompt is enabled but not required, then the option can be - # used as a flag to indicate using prompt or flag_value. - self._flag_needs_value = self.prompt is not None and not self.prompt_required - - if is_flag is None: - if flag_value is not None: - # Implicitly a flag because flag_value was set. - is_flag = True - elif self._flag_needs_value: - # Not a flag, but when used as a flag it shows a prompt. - is_flag = False - else: - # Implicitly a flag because flag options were given. - is_flag = bool(self.secondary_opts) - elif is_flag is False and not self._flag_needs_value: - # Not a flag, and prompt is not enabled, can be used as a - # flag if flag_value is set. - self._flag_needs_value = flag_value is not None - - self.default: t.Union[t.Any, t.Callable[[], t.Any]] - - if is_flag and default_is_missing and not self.required: - if multiple: - self.default = () - else: - self.default = False - - if flag_value is None: - flag_value = not self.default - - self.type: types.ParamType - if is_flag and type is None: - # Re-guess the type from the flag value instead of the - # default. - self.type = types.convert_type(None, flag_value) - - self.is_flag: bool = is_flag - self.is_bool_flag: bool = is_flag and isinstance(self.type, types.BoolParamType) - self.flag_value: t.Any = flag_value - - # Counting - self.count = count - if count: - if type is None: - self.type = types.IntRange(min=0) - if default_is_missing: - self.default = 0 - - self.allow_from_autoenv = allow_from_autoenv - self.help = help - self.show_default = show_default - self.show_choices = show_choices - self.show_envvar = show_envvar - - if __debug__: - if self.nargs == -1: - raise TypeError("nargs=-1 is not supported for options.") - - if self.prompt and self.is_flag and not self.is_bool_flag: - raise TypeError("'prompt' is not valid for non-boolean flag.") - - if not self.is_bool_flag and self.secondary_opts: - raise TypeError("Secondary flag is not valid for non-boolean flag.") - - if self.is_bool_flag and self.hide_input and self.prompt is not None: - raise TypeError( - "'prompt' with 'hide_input' is not valid for boolean flag." 
- ) - - if self.count: - if self.multiple: - raise TypeError("'count' is not valid with 'multiple'.") - - if self.is_flag: - raise TypeError("'count' is not valid with 'is_flag'.") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - help=self.help, - prompt=self.prompt, - is_flag=self.is_flag, - flag_value=self.flag_value, - count=self.count, - hidden=self.hidden, - ) - return info_dict - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - opts = [] - secondary_opts = [] - name = None - possible_names = [] - - for decl in decls: - if decl.isidentifier(): - if name is not None: - raise TypeError(f"Name '{name}' defined twice") - name = decl - else: - split_char = ";" if decl[:1] == "/" else "/" - if split_char in decl: - first, second = decl.split(split_char, 1) - first = first.rstrip() - if first: - possible_names.append(split_opt(first)) - opts.append(first) - second = second.lstrip() - if second: - secondary_opts.append(second.lstrip()) - if first == second: - raise ValueError( - f"Boolean option {decl!r} cannot use the" - " same flag for true/false." - ) - else: - possible_names.append(split_opt(decl)) - opts.append(decl) - - if name is None and possible_names: - possible_names.sort(key=lambda x: -len(x[0])) # group long options first - name = possible_names[0][1].replace("-", "_").lower() - if not name.isidentifier(): - name = None - - if name is None: - if not expose_value: - return None, opts, secondary_opts - raise TypeError("Could not determine name for option") - - if not opts and not secondary_opts: - raise TypeError( - f"No options defined but a name was passed ({name})." - " Did you mean to declare an argument instead? Did" - f" you mean to pass '--{name}'?" 
- ) - - return name, opts, secondary_opts - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - if self.multiple: - action = "append" - elif self.count: - action = "count" - else: - action = "store" - - if self.is_flag: - action = f"{action}_const" - - if self.is_bool_flag and self.secondary_opts: - parser.add_option( - obj=self, opts=self.opts, dest=self.name, action=action, const=True - ) - parser.add_option( - obj=self, - opts=self.secondary_opts, - dest=self.name, - action=action, - const=False, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - const=self.flag_value, - ) - else: - parser.add_option( - obj=self, - opts=self.opts, - dest=self.name, - action=action, - nargs=self.nargs, - ) - - def get_help_record(self, ctx: Context) -> t.Optional[t.Tuple[str, str]]: - if self.hidden: - return None - - any_prefix_is_slash = False - - def _write_opts(opts: t.Sequence[str]) -> str: - nonlocal any_prefix_is_slash - - rv, any_slashes = join_options(opts) - - if any_slashes: - any_prefix_is_slash = True - - if not self.is_flag and not self.count: - rv += f" {self.make_metavar()}" - - return rv - - rv = [_write_opts(self.opts)] - - if self.secondary_opts: - rv.append(_write_opts(self.secondary_opts)) - - help = self.help or "" - extra = [] - - if self.show_envvar: - envvar = self.envvar - - if envvar is None: - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - - if envvar is not None: - var_str = ( - envvar - if isinstance(envvar, str) - else ", ".join(str(d) for d in envvar) - ) - extra.append(_("env var: {var}").format(var=var_str)) - - # Temporarily enable resilient parsing to avoid type casting - # failing for the default. Might be possible to extend this to - # help formatting in general. - resilient = ctx.resilient_parsing - ctx.resilient_parsing = True - - try: - default_value = self.get_default(ctx, call=False) - finally: - ctx.resilient_parsing = resilient - - show_default = False - show_default_is_str = False - - if self.show_default is not None: - if isinstance(self.show_default, str): - show_default_is_str = show_default = True - else: - show_default = self.show_default - elif ctx.show_default is not None: - show_default = ctx.show_default - - if show_default_is_str or (show_default and (default_value is not None)): - if show_default_is_str: - default_string = f"({self.show_default})" - elif isinstance(default_value, (list, tuple)): - default_string = ", ".join(str(d) for d in default_value) - elif inspect.isfunction(default_value): - default_string = _("(dynamic)") - elif self.is_bool_flag and self.secondary_opts: - # For boolean flags that have distinct True/False opts, - # use the opt without prefix instead of the value. 
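A minimal sketch of the branch below, assuming click 8.x; the option names are invented for illustration, not taken from this file::

    import click

    @click.command()
    @click.option("--shout/--no-shout", default=False, show_default=True,
                  help="Upper-case the greeting.")
    def greet(shout):
        click.echo("HELLO" if shout else "hello")

With a distinct true/false pair, ``greet --help`` renders the default as the bare opt name, ``[default: no-shout]``, rather than the value ``False``.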
- default_string = split_opt( - (self.opts if self.default else self.secondary_opts)[0] - )[1] - elif self.is_bool_flag and not self.secondary_opts and not default_value: - default_string = "" - else: - default_string = str(default_value) - - if default_string: - extra.append(_("default: {default}").format(default=default_string)) - - if ( - isinstance(self.type, types._NumberRangeBase) - # skip count with default range type - and not (self.count and self.type.min == 0 and self.type.max is None) - ): - range_str = self.type._describe_range() - - if range_str: - extra.append(range_str) - - if self.required: - extra.append(_("required")) - - if extra: - extra_str = "; ".join(extra) - help = f"{help} [{extra_str}]" if help else f"[{extra_str}]" - - return ("; " if any_prefix_is_slash else " / ").join(rv), help - - @t.overload - def get_default( - self, ctx: Context, call: "te.Literal[True]" = True - ) -> t.Optional[t.Any]: - ... - - @t.overload - def get_default( - self, ctx: Context, call: bool = ... - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - ... - - def get_default( - self, ctx: Context, call: bool = True - ) -> t.Optional[t.Union[t.Any, t.Callable[[], t.Any]]]: - # If we're a non boolean flag our default is more complex because - # we need to look at all flags in the same group to figure out - # if we're the default one in which case we return the flag - # value as default. - if self.is_flag and not self.is_bool_flag: - for param in ctx.command.params: - if param.name == self.name and param.default: - return t.cast(Option, param).flag_value - - return None - - return super().get_default(ctx, call=call) - - def prompt_for_value(self, ctx: Context) -> t.Any: - """This is an alternative flow that can be activated in the full - value processing if a value does not exist. It will prompt the - user until a valid value exists and then returns the processed - value as result. - """ - assert self.prompt is not None - - # Calculate the default before prompting anything to be stable. - default = self.get_default(ctx) - - # If this is a prompt for a flag we need to handle this - # differently. - if self.is_bool_flag: - return confirm(self.prompt, default) - - return prompt( - self.prompt, - default=default, - type=self.type, - hide_input=self.hide_input, - show_choices=self.show_choices, - confirmation_prompt=self.confirmation_prompt, - value_proc=lambda x: self.process_value(ctx, x), - ) - - def resolve_envvar_value(self, ctx: Context) -> t.Optional[str]: - rv = super().resolve_envvar_value(ctx) - - if rv is not None: - return rv - - if ( - self.allow_from_autoenv - and ctx.auto_envvar_prefix is not None - and self.name is not None - ): - envvar = f"{ctx.auto_envvar_prefix}_{self.name.upper()}" - rv = os.environ.get(envvar) - - if rv: - return rv - - return None - - def value_from_envvar(self, ctx: Context) -> t.Optional[t.Any]: - rv: t.Optional[t.Any] = self.resolve_envvar_value(ctx) - - if rv is None: - return None - - value_depth = (self.nargs != 1) + bool(self.multiple) - - if value_depth > 0: - rv = self.type.split_envvar_value(rv) - - if self.multiple and self.nargs != 1: - rv = batch(rv, self.nargs) - - return rv - - def consume_value( - self, ctx: Context, opts: t.Mapping[str, "Parameter"] - ) -> t.Tuple[t.Any, ParameterSource]: - value, source = super().consume_value(ctx, opts) - - # The parser will emit a sentinel value if the option can be - # given as a flag without a value. This is different from None - # to distinguish from the flag not being given at all. 
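The sentinel round-trip is easiest to read from the command line side; a minimal sketch, assuming click 8.x (option name hypothetical)::

    import click

    @click.command()
    @click.option("--token", prompt=True, prompt_required=False, default=None)
    def login(token):
        click.echo(f"token={token}")

``login`` uses the default without prompting, ``login --token abc`` passes the value through, and a bare ``login --token`` makes the parser emit the sentinel so the branch below prompts instead.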
- if value is _flag_needs_value: - if self.prompt is not None and not ctx.resilient_parsing: - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - else: - value = self.flag_value - source = ParameterSource.COMMANDLINE - - elif ( - self.multiple - and value is not None - and any(v is _flag_needs_value for v in value) - ): - value = [self.flag_value if v is _flag_needs_value else v for v in value] - source = ParameterSource.COMMANDLINE - - # The value wasn't set, or used the param's default, prompt if - # prompting is enabled. - elif ( - source in {None, ParameterSource.DEFAULT} - and self.prompt is not None - and (self.required or self.prompt_required) - and not ctx.resilient_parsing - ): - value = self.prompt_for_value(ctx) - source = ParameterSource.PROMPT - - return value, source - - -class Argument(Parameter): - """Arguments are positional parameters to a command. They generally - provide fewer features than options but can have infinite ``nargs`` - and are required by default. - - All parameters are passed onwards to the constructor of :class:`Parameter`. - """ - - param_type_name = "argument" - - def __init__( - self, - param_decls: t.Sequence[str], - required: t.Optional[bool] = None, - **attrs: t.Any, - ) -> None: - if required is None: - if attrs.get("default") is not None: - required = False - else: - required = attrs.get("nargs", 1) > 0 - - if "multiple" in attrs: - raise TypeError("__init__() got an unexpected keyword argument 'multiple'.") - - super().__init__(param_decls, required=required, **attrs) - - if __debug__: - if self.default is not None and self.nargs == -1: - raise TypeError("'default' is not supported for nargs=-1.") - - @property - def human_readable_name(self) -> str: - if self.metavar is not None: - return self.metavar - return self.name.upper() # type: ignore - - def make_metavar(self) -> str: - if self.metavar is not None: - return self.metavar - var = self.type.get_metavar(self) - if not var: - var = self.name.upper() # type: ignore - if not self.required: - var = f"[{var}]" - if self.nargs != 1: - var += "..." - return var - - def _parse_decls( - self, decls: t.Sequence[str], expose_value: bool - ) -> t.Tuple[t.Optional[str], t.List[str], t.List[str]]: - if not decls: - if not expose_value: - return None, [], [] - raise TypeError("Could not determine name for argument") - if len(decls) == 1: - name = arg = decls[0] - name = name.replace("-", "_").lower() - else: - raise TypeError( - "Arguments take exactly one parameter declaration, got" - f" {len(decls)}." 
- ) - return name, [arg], [] - - def get_usage_pieces(self, ctx: Context) -> t.List[str]: - return [self.make_metavar()] - - def get_error_hint(self, ctx: Context) -> str: - return f"'{self.make_metavar()}'" - - def add_to_parser(self, parser: OptionParser, ctx: Context) -> None: - parser.add_argument(dest=self.name, nargs=self.nargs, obj=self) diff --git a/myenv/Lib/site-packages/click/decorators.py b/myenv/Lib/site-packages/click/decorators.py deleted file mode 100644 index d9bba95..0000000 --- a/myenv/Lib/site-packages/click/decorators.py +++ /dev/null @@ -1,561 +0,0 @@ -import inspect -import types -import typing as t -from functools import update_wrapper -from gettext import gettext as _ - -from .core import Argument -from .core import Command -from .core import Context -from .core import Group -from .core import Option -from .core import Parameter -from .globals import get_current_context -from .utils import echo - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") -T = t.TypeVar("T") -_AnyCallable = t.Callable[..., t.Any] -FC = t.TypeVar("FC", bound=t.Union[_AnyCallable, Command]) - - -def pass_context(f: "t.Callable[te.Concatenate[Context, P], R]") -> "t.Callable[P, R]": - """Marks a callback as wanting to receive the current context - object as first argument. - """ - - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": - return f(get_current_context(), *args, **kwargs) - - return update_wrapper(new_func, f) - - -def pass_obj(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": - """Similar to :func:`pass_context`, but only pass the object on the - context onwards (:attr:`Context.obj`). This is useful if that object - represents the state of a nested system. - """ - - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": - return f(get_current_context().obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - -def make_pass_decorator( - object_type: t.Type[T], ensure: bool = False -) -> t.Callable[["t.Callable[te.Concatenate[T, P], R]"], "t.Callable[P, R]"]: - """Given an object type this creates a decorator that will work - similar to :func:`pass_obj` but instead of passing the object of the - current context, it will find the innermost context of type - :func:`object_type`. - - This generates a decorator that works roughly like this:: - - from functools import update_wrapper - - def decorator(f): - @pass_context - def new_func(ctx, *args, **kwargs): - obj = ctx.find_object(object_type) - return ctx.invoke(f, obj, *args, **kwargs) - return update_wrapper(new_func, f) - return decorator - - :param object_type: the type of the object to pass. - :param ensure: if set to `True`, a new object will be created and - remembered on the context if it's not there yet. - """ - - def decorator(f: "t.Callable[te.Concatenate[T, P], R]") -> "t.Callable[P, R]": - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> "R": - ctx = get_current_context() - - obj: t.Optional[T] - if ensure: - obj = ctx.ensure_object(object_type) - else: - obj = ctx.find_object(object_type) - - if obj is None: - raise RuntimeError( - "Managed to invoke callback without a context" - f" object of type {object_type.__name__!r}" - " existing." 
- ) - - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - return decorator # type: ignore[return-value] - - -def pass_meta_key( - key: str, *, doc_description: t.Optional[str] = None -) -> "t.Callable[[t.Callable[te.Concatenate[t.Any, P], R]], t.Callable[P, R]]": - """Create a decorator that passes a key from - :attr:`click.Context.meta` as the first argument to the decorated - function. - - :param key: Key in ``Context.meta`` to pass. - :param doc_description: Description of the object being passed, - inserted into the decorator's docstring. Defaults to "the 'key' - key from Context.meta". - - .. versionadded:: 8.0 - """ - - def decorator(f: "t.Callable[te.Concatenate[t.Any, P], R]") -> "t.Callable[P, R]": - def new_func(*args: "P.args", **kwargs: "P.kwargs") -> R: - ctx = get_current_context() - obj = ctx.meta[key] - return ctx.invoke(f, obj, *args, **kwargs) - - return update_wrapper(new_func, f) - - if doc_description is None: - doc_description = f"the {key!r} key from :attr:`click.Context.meta`" - - decorator.__doc__ = ( - f"Decorator that passes {doc_description} as the first argument" - " to the decorated function." - ) - return decorator # type: ignore[return-value] - - -CmdType = t.TypeVar("CmdType", bound=Command) - - -# variant: no call, directly as decorator for a function. -@t.overload -def command(name: _AnyCallable) -> Command: - ... - - -# variant: with positional name and with positional or keyword cls argument: -# @command(namearg, CommandCls, ...) or @command(namearg, cls=CommandCls, ...) -@t.overload -def command( - name: t.Optional[str], - cls: t.Type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: - ... - - -# variant: name omitted, cls _must_ be a keyword argument, @command(cls=CommandCls, ...) -@t.overload -def command( - name: None = None, - *, - cls: t.Type[CmdType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], CmdType]: - ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def command( - name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Command]: - ... - - -def command( - name: t.Union[t.Optional[str], _AnyCallable] = None, - cls: t.Optional[t.Type[CmdType]] = None, - **attrs: t.Any, -) -> t.Union[Command, t.Callable[[_AnyCallable], t.Union[Command, CmdType]]]: - r"""Creates a new :class:`Command` and uses the decorated function as - callback. This will also automatically attach all decorated - :func:`option`\s and :func:`argument`\s as parameters to the command. - - The name of the command defaults to the name of the function with - underscores replaced by dashes. If you want to change that, you can - pass the intended name as the first argument. - - All keyword arguments are forwarded to the underlying command class. - For the ``params`` argument, any decorated params are appended to - the end of the list. - - Once decorated the function turns into a :class:`Command` instance - that can be invoked as a command line utility or be attached to a - command :class:`Group`. - - :param name: the name of the command. This defaults to the function - name with underscores replaced by dashes. - :param cls: the command class to instantiate. This defaults to - :class:`Command`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - - .. versionchanged:: 8.1 - The ``params`` argument can be used. Decorated params are - appended to the end of the list. 
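A minimal usage sketch under these rules (the function name is invented)::

    import click

    @click.command()
    def say_hello():
        """Print a greeting."""
        click.echo("Hello!")

The resulting command is invoked as ``say-hello`` (underscores become dashes), and the docstring doubles as its help text.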
- """ - - func: t.Optional[t.Callable[[_AnyCallable], t.Any]] = None - - if callable(name): - func = name - name = None - assert cls is None, "Use 'command(cls=cls)(callable)' to specify a class." - assert not attrs, "Use 'command(**kwargs)(callable)' to provide arguments." - - if cls is None: - cls = t.cast(t.Type[CmdType], Command) - - def decorator(f: _AnyCallable) -> CmdType: - if isinstance(f, Command): - raise TypeError("Attempted to convert a callback into a command twice.") - - attr_params = attrs.pop("params", None) - params = attr_params if attr_params is not None else [] - - try: - decorator_params = f.__click_params__ # type: ignore - except AttributeError: - pass - else: - del f.__click_params__ # type: ignore - params.extend(reversed(decorator_params)) - - if attrs.get("help") is None: - attrs["help"] = f.__doc__ - - if t.TYPE_CHECKING: - assert cls is not None - assert not callable(name) - - cmd = cls( - name=name or f.__name__.lower().replace("_", "-"), - callback=f, - params=params, - **attrs, - ) - cmd.__doc__ = f.__doc__ - return cmd - - if func is not None: - return decorator(func) - - return decorator - - -GrpType = t.TypeVar("GrpType", bound=Group) - - -# variant: no call, directly as decorator for a function. -@t.overload -def group(name: _AnyCallable) -> Group: - ... - - -# variant: with positional name and with positional or keyword cls argument: -# @group(namearg, GroupCls, ...) or @group(namearg, cls=GroupCls, ...) -@t.overload -def group( - name: t.Optional[str], - cls: t.Type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: - ... - - -# variant: name omitted, cls _must_ be a keyword argument, @group(cmd=GroupCls, ...) -@t.overload -def group( - name: None = None, - *, - cls: t.Type[GrpType], - **attrs: t.Any, -) -> t.Callable[[_AnyCallable], GrpType]: - ... - - -# variant: with optional string name, no cls argument provided. -@t.overload -def group( - name: t.Optional[str] = ..., cls: None = None, **attrs: t.Any -) -> t.Callable[[_AnyCallable], Group]: - ... - - -def group( - name: t.Union[str, _AnyCallable, None] = None, - cls: t.Optional[t.Type[GrpType]] = None, - **attrs: t.Any, -) -> t.Union[Group, t.Callable[[_AnyCallable], t.Union[Group, GrpType]]]: - """Creates a new :class:`Group` with a function as callback. This - works otherwise the same as :func:`command` just that the `cls` - parameter is set to :class:`Group`. - - .. versionchanged:: 8.1 - This decorator can be applied without parentheses. - """ - if cls is None: - cls = t.cast(t.Type[GrpType], Group) - - if callable(name): - return command(cls=cls, **attrs)(name) - - return command(name, cls, **attrs) - - -def _param_memo(f: t.Callable[..., t.Any], param: Parameter) -> None: - if isinstance(f, Command): - f.params.append(param) - else: - if not hasattr(f, "__click_params__"): - f.__click_params__ = [] # type: ignore - - f.__click_params__.append(param) # type: ignore - - -def argument( - *param_decls: str, cls: t.Optional[t.Type[Argument]] = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an argument to the command. All positional arguments are - passed as parameter declarations to :class:`Argument`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Argument` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default argument class, refer to :class:`Argument` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the argument class to instantiate. 
This defaults to - :class:`Argument`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Argument - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def option( - *param_decls: str, cls: t.Optional[t.Type[Option]] = None, **attrs: t.Any -) -> t.Callable[[FC], FC]: - """Attaches an option to the command. All positional arguments are - passed as parameter declarations to :class:`Option`; all keyword - arguments are forwarded unchanged (except ``cls``). - This is equivalent to creating an :class:`Option` instance manually - and attaching it to the :attr:`Command.params` list. - - For the default option class, refer to :class:`Option` and - :class:`Parameter` for descriptions of parameters. - - :param cls: the option class to instantiate. This defaults to - :class:`Option`. - :param param_decls: Passed as positional arguments to the constructor of - ``cls``. - :param attrs: Passed as keyword arguments to the constructor of ``cls``. - """ - if cls is None: - cls = Option - - def decorator(f: FC) -> FC: - _param_memo(f, cls(param_decls, **attrs)) - return f - - return decorator - - -def confirmation_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--yes`` option which shows a prompt before continuing if - not passed. If the prompt is declined, the program will exit. - - :param param_decls: One or more option names. Defaults to the single - value ``"--yes"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value: - ctx.abort() - - if not param_decls: - param_decls = ("--yes",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("callback", callback) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("prompt", "Do you want to continue?") - kwargs.setdefault("help", "Confirm the action without prompting.") - return option(*param_decls, **kwargs) - - -def password_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--password`` option which prompts for a password, hiding - input and asking to enter the value again for confirmation. - - :param param_decls: One or more option names. Defaults to the single - value ``"--password"``. - :param kwargs: Extra arguments are passed to :func:`option`. - """ - if not param_decls: - param_decls = ("--password",) - - kwargs.setdefault("prompt", True) - kwargs.setdefault("confirmation_prompt", True) - kwargs.setdefault("hide_input", True) - return option(*param_decls, **kwargs) - - -def version_option( - version: t.Optional[str] = None, - *param_decls: str, - package_name: t.Optional[str] = None, - prog_name: t.Optional[str] = None, - message: t.Optional[str] = None, - **kwargs: t.Any, -) -> t.Callable[[FC], FC]: - """Add a ``--version`` option which immediately prints the version - number and exits the program. - - If ``version`` is not provided, Click will try to detect it using - :func:`importlib.metadata.version` to get the version for the - ``package_name``. On Python < 3.8, the ``importlib_metadata`` - backport must be installed. - - If ``package_name`` is not provided, Click will try to detect it by - inspecting the stack frames. This will be used to detect the - version, so it must match the name of the installed package. - - :param version: The version number to show. 
If not provided, Click - will try to detect it. - :param param_decls: One or more option names. Defaults to the single - value ``"--version"``. - :param package_name: The package name to detect the version from. If - not provided, Click will try to detect it. - :param prog_name: The name of the CLI to show in the message. If not - provided, it will be detected from the command. - :param message: The message to show. The values ``%(prog)s``, - ``%(package)s``, and ``%(version)s`` are available. Defaults to - ``"%(prog)s, version %(version)s"``. - :param kwargs: Extra arguments are passed to :func:`option`. - :raise RuntimeError: ``version`` could not be detected. - - .. versionchanged:: 8.0 - Add the ``package_name`` parameter, and the ``%(package)s`` - value for messages. - - .. versionchanged:: 8.0 - Use :mod:`importlib.metadata` instead of ``pkg_resources``. The - version is detected based on the package name, not the entry - point name. The Python package name must match the installed - package name, or be passed with ``package_name=``. - """ - if message is None: - message = _("%(prog)s, version %(version)s") - - if version is None and package_name is None: - frame = inspect.currentframe() - f_back = frame.f_back if frame is not None else None - f_globals = f_back.f_globals if f_back is not None else None - # break reference cycle - # https://docs.python.org/3/library/inspect.html#the-interpreter-stack - del frame - - if f_globals is not None: - package_name = f_globals.get("__name__") - - if package_name == "__main__": - package_name = f_globals.get("__package__") - - if package_name: - package_name = package_name.partition(".")[0] - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - nonlocal prog_name - nonlocal version - - if prog_name is None: - prog_name = ctx.find_root().info_name - - if version is None and package_name is not None: - metadata: t.Optional[types.ModuleType] - - try: - from importlib import metadata # type: ignore - except ImportError: - # Python < 3.8 - import importlib_metadata as metadata # type: ignore - - try: - version = metadata.version(package_name) # type: ignore - except metadata.PackageNotFoundError: # type: ignore - raise RuntimeError( - f"{package_name!r} is not installed. Try passing" - " 'package_name' instead." - ) from None - - if version is None: - raise RuntimeError( - f"Could not determine the version for {package_name!r} automatically." - ) - - echo( - message % {"prog": prog_name, "package": package_name, "version": version}, - color=ctx.color, - ) - ctx.exit() - - if not param_decls: - param_decls = ("--version",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show the version and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) - - -def help_option(*param_decls: str, **kwargs: t.Any) -> t.Callable[[FC], FC]: - """Add a ``--help`` option which immediately prints the help page - and exits the program. - - This is usually unnecessary, as the ``--help`` option is added to - each command automatically unless ``add_help_option=False`` is - passed. - - :param param_decls: One or more option names. Defaults to the single - value ``"--help"``. - :param kwargs: Extra arguments are passed to :func:`option`. 
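For contrast with this automatic help option, a hedged sketch of the eager-option pattern both decorators rely on (program name and version invented)::

    import click

    @click.group()
    @click.version_option(version="1.2.3", prog_name="mycli")
    def cli():
        pass

``mycli --version`` prints ``mycli, version 1.2.3`` and exits before any subcommand runs, because the option is eager and its callback calls ``ctx.exit()``.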
- """ - - def callback(ctx: Context, param: Parameter, value: bool) -> None: - if not value or ctx.resilient_parsing: - return - - echo(ctx.get_help(), color=ctx.color) - ctx.exit() - - if not param_decls: - param_decls = ("--help",) - - kwargs.setdefault("is_flag", True) - kwargs.setdefault("expose_value", False) - kwargs.setdefault("is_eager", True) - kwargs.setdefault("help", _("Show this message and exit.")) - kwargs["callback"] = callback - return option(*param_decls, **kwargs) diff --git a/myenv/Lib/site-packages/click/exceptions.py b/myenv/Lib/site-packages/click/exceptions.py deleted file mode 100644 index fe68a36..0000000 --- a/myenv/Lib/site-packages/click/exceptions.py +++ /dev/null @@ -1,288 +0,0 @@ -import typing as t -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import get_text_stderr -from .utils import echo -from .utils import format_filename - -if t.TYPE_CHECKING: - from .core import Command - from .core import Context - from .core import Parameter - - -def _join_param_hints( - param_hint: t.Optional[t.Union[t.Sequence[str], str]] -) -> t.Optional[str]: - if param_hint is not None and not isinstance(param_hint, str): - return " / ".join(repr(x) for x in param_hint) - - return param_hint - - -class ClickException(Exception): - """An exception that Click can handle and show to the user.""" - - #: The exit code for this exception. - exit_code = 1 - - def __init__(self, message: str) -> None: - super().__init__(message) - self.message = message - - def format_message(self) -> str: - return self.message - - def __str__(self) -> str: - return self.message - - def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: - if file is None: - file = get_text_stderr() - - echo(_("Error: {message}").format(message=self.format_message()), file=file) - - -class UsageError(ClickException): - """An internal exception that signals a usage error. This typically - aborts any further handling. - - :param message: the error message to display. - :param ctx: optionally the context that caused this error. Click will - fill in the context automatically in some situations. - """ - - exit_code = 2 - - def __init__(self, message: str, ctx: t.Optional["Context"] = None) -> None: - super().__init__(message) - self.ctx = ctx - self.cmd: t.Optional["Command"] = self.ctx.command if self.ctx else None - - def show(self, file: t.Optional[t.IO[t.Any]] = None) -> None: - if file is None: - file = get_text_stderr() - color = None - hint = "" - if ( - self.ctx is not None - and self.ctx.command.get_help_option(self.ctx) is not None - ): - hint = _("Try '{command} {option}' for help.").format( - command=self.ctx.command_path, option=self.ctx.help_option_names[0] - ) - hint = f"{hint}\n" - if self.ctx is not None: - color = self.ctx.color - echo(f"{self.ctx.get_usage()}\n{hint}", file=file, color=color) - echo( - _("Error: {message}").format(message=self.format_message()), - file=file, - color=color, - ) - - -class BadParameter(UsageError): - """An exception that formats out a standardized error message for a - bad parameter. This is useful when thrown from a callback or type as - Click will attach contextual information to it (for instance, which - parameter it is). - - .. versionadded:: 2.0 - - :param param: the parameter object that caused this error. This can - be left out, and Click will attach this info itself - if possible. - :param param_hint: a string that shows up as parameter name. 
This - can be used as alternative to `param` in cases - where custom validation should happen. If it is - a string it's used as such, if it's a list then - each item is quoted and separated. - """ - - def __init__( - self, - message: str, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - ) -> None: - super().__init__(message, ctx) - self.param = param - self.param_hint = param_hint - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - return _("Invalid value: {message}").format(message=self.message) - - return _("Invalid value for {param_hint}: {message}").format( - param_hint=_join_param_hints(param_hint), message=self.message - ) - - -class MissingParameter(BadParameter): - """Raised if click required an option or argument but it was not - provided when invoking the script. - - .. versionadded:: 4.0 - - :param param_type: a string that indicates the type of the parameter. - The default is to inherit the parameter type from - the given `param`. Valid values are ``'parameter'``, - ``'option'`` or ``'argument'``. - """ - - def __init__( - self, - message: t.Optional[str] = None, - ctx: t.Optional["Context"] = None, - param: t.Optional["Parameter"] = None, - param_hint: t.Optional[str] = None, - param_type: t.Optional[str] = None, - ) -> None: - super().__init__(message or "", ctx, param, param_hint) - self.param_type = param_type - - def format_message(self) -> str: - if self.param_hint is not None: - param_hint: t.Optional[str] = self.param_hint - elif self.param is not None: - param_hint = self.param.get_error_hint(self.ctx) # type: ignore - else: - param_hint = None - - param_hint = _join_param_hints(param_hint) - param_hint = f" {param_hint}" if param_hint else "" - - param_type = self.param_type - if param_type is None and self.param is not None: - param_type = self.param.param_type_name - - msg = self.message - if self.param is not None: - msg_extra = self.param.type.get_missing_message(self.param) - if msg_extra: - if msg: - msg += f". {msg_extra}" - else: - msg = msg_extra - - msg = f" {msg}" if msg else "" - - # Translate param_type for known types. - if param_type == "argument": - missing = _("Missing argument") - elif param_type == "option": - missing = _("Missing option") - elif param_type == "parameter": - missing = _("Missing parameter") - else: - missing = _("Missing {param_type}").format(param_type=param_type) - - return f"{missing}{param_hint}.{msg}" - - def __str__(self) -> str: - if not self.message: - param_name = self.param.name if self.param else None - return _("Missing parameter: {param_name}").format(param_name=param_name) - else: - return self.message - - -class NoSuchOption(UsageError): - """Raised if click attempted to handle an option that does not - exist. - - .. 
versionadded:: 4.0 - """ - - def __init__( - self, - option_name: str, - message: t.Optional[str] = None, - possibilities: t.Optional[t.Sequence[str]] = None, - ctx: t.Optional["Context"] = None, - ) -> None: - if message is None: - message = _("No such option: {name}").format(name=option_name) - - super().__init__(message, ctx) - self.option_name = option_name - self.possibilities = possibilities - - def format_message(self) -> str: - if not self.possibilities: - return self.message - - possibility_str = ", ".join(sorted(self.possibilities)) - suggest = ngettext( - "Did you mean {possibility}?", - "(Possible options: {possibilities})", - len(self.possibilities), - ).format(possibility=possibility_str, possibilities=possibility_str) - return f"{self.message} {suggest}" - - -class BadOptionUsage(UsageError): - """Raised if an option is generally supplied but the use of the option - was incorrect. This is for instance raised if the number of arguments - for an option is not correct. - - .. versionadded:: 4.0 - - :param option_name: the name of the option being used incorrectly. - """ - - def __init__( - self, option_name: str, message: str, ctx: t.Optional["Context"] = None - ) -> None: - super().__init__(message, ctx) - self.option_name = option_name - - -class BadArgumentUsage(UsageError): - """Raised if an argument is generally supplied but the use of the argument - was incorrect. This is for instance raised if the number of values - for an argument is not correct. - - .. versionadded:: 6.0 - """ - - -class FileError(ClickException): - """Raised if a file cannot be opened.""" - - def __init__(self, filename: str, hint: t.Optional[str] = None) -> None: - if hint is None: - hint = _("unknown error") - - super().__init__(hint) - self.ui_filename: str = format_filename(filename) - self.filename = filename - - def format_message(self) -> str: - return _("Could not open file {filename!r}: {message}").format( - filename=self.ui_filename, message=self.message - ) - - -class Abort(RuntimeError): - """An internal signalling exception that signals Click to abort.""" - - -class Exit(RuntimeError): - """An exception that indicates that the application should exit with some - status code. - - :param code: the status code to exit with. - """ - - __slots__ = ("exit_code",) - - def __init__(self, code: int = 0) -> None: - self.exit_code: int = code diff --git a/myenv/Lib/site-packages/click/formatting.py b/myenv/Lib/site-packages/click/formatting.py deleted file mode 100644 index ddd2a2f..0000000 --- a/myenv/Lib/site-packages/click/formatting.py +++ /dev/null @@ -1,301 +0,0 @@ -import typing as t -from contextlib import contextmanager -from gettext import gettext as _ - -from ._compat import term_len -from .parser import split_opt - -# Can force a width. This is used by the test system -FORCED_WIDTH: t.Optional[int] = None - - -def measure_table(rows: t.Iterable[t.Tuple[str, str]]) -> t.Tuple[int, ...]: - widths: t.Dict[int, int] = {} - - for row in rows: - for idx, col in enumerate(row): - widths[idx] = max(widths.get(idx, 0), term_len(col)) - - return tuple(y for x, y in sorted(widths.items())) - - -def iter_rows( - rows: t.Iterable[t.Tuple[str, str]], col_count: int -) -> t.Iterator[t.Tuple[str, ...]]: - for row in rows: - yield row + ("",) * (col_count - len(row)) - - -def wrap_text( - text: str, - width: int = 78, - initial_indent: str = "", - subsequent_indent: str = "", - preserve_paragraphs: bool = False, -) -> str: - """A helper function that intelligently wraps text. 
By default, it - assumes that it operates on a single paragraph of text but if the - `preserve_paragraphs` parameter is provided it will intelligently - handle paragraphs (defined by two empty lines). - - If paragraphs are handled, a paragraph can be prefixed with an empty - line containing the ``\\b`` character (``\\x08``) to indicate that - no rewrapping should happen in that block. - - :param text: the text that should be rewrapped. - :param width: the maximum width for the text. - :param initial_indent: the initial indent that should be placed on the - first line as a string. - :param subsequent_indent: the indent string that should be placed on - each consecutive line. - :param preserve_paragraphs: if this flag is set then the wrapping will - intelligently handle paragraphs. - """ - from ._textwrap import TextWrapper - - text = text.expandtabs() - wrapper = TextWrapper( - width, - initial_indent=initial_indent, - subsequent_indent=subsequent_indent, - replace_whitespace=False, - ) - if not preserve_paragraphs: - return wrapper.fill(text) - - p: t.List[t.Tuple[int, bool, str]] = [] - buf: t.List[str] = [] - indent = None - - def _flush_par() -> None: - if not buf: - return - if buf[0].strip() == "\b": - p.append((indent or 0, True, "\n".join(buf[1:]))) - else: - p.append((indent or 0, False, " ".join(buf))) - del buf[:] - - for line in text.splitlines(): - if not line: - _flush_par() - indent = None - else: - if indent is None: - orig_len = term_len(line) - line = line.lstrip() - indent = orig_len - term_len(line) - buf.append(line) - _flush_par() - - rv = [] - for indent, raw, text in p: - with wrapper.extra_indent(" " * indent): - if raw: - rv.append(wrapper.indent_only(text)) - else: - rv.append(wrapper.fill(text)) - - return "\n\n".join(rv) - - -class HelpFormatter: - """This class helps with formatting text-based help pages. It's - usually just needed for very special internal cases, but it's also - exposed so that developers can write their own fancy outputs. - - At present, it always writes into memory. - - :param indent_increment: the additional increment for each level. - :param width: the width for the text. This defaults to the terminal - width clamped to a maximum of 78. - """ - - def __init__( - self, - indent_increment: int = 2, - width: t.Optional[int] = None, - max_width: t.Optional[int] = None, - ) -> None: - import shutil - - self.indent_increment = indent_increment - if max_width is None: - max_width = 80 - if width is None: - width = FORCED_WIDTH - if width is None: - width = max(min(shutil.get_terminal_size().columns, max_width) - 2, 50) - self.width = width - self.current_indent = 0 - self.buffer: t.List[str] = [] - - def write(self, string: str) -> None: - """Writes a unicode string into the internal buffer.""" - self.buffer.append(string) - - def indent(self) -> None: - """Increases the indentation.""" - self.current_indent += self.indent_increment - - def dedent(self) -> None: - """Decreases the indentation.""" - self.current_indent -= self.indent_increment - - def write_usage( - self, prog: str, args: str = "", prefix: t.Optional[str] = None - ) -> None: - """Writes a usage line into the buffer. - - :param prog: the program name. - :param args: whitespace separated list of arguments. - :param prefix: The prefix for the first line. Defaults to - ``"Usage: "``. 
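A minimal sketch of the formatter in isolation, assuming click 8.x::

    from click.formatting import HelpFormatter

    formatter = HelpFormatter(width=40)
    formatter.write_usage("tool", "[OPTIONS] SRC DST")
    print(formatter.getvalue())  # Usage: tool [OPTIONS] SRC DST

Since the formatter always writes into memory, ``getvalue()`` is how callers retrieve the rendered text.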
- """ - if prefix is None: - prefix = f"{_('Usage:')} " - - usage_prefix = f"{prefix:>{self.current_indent}}{prog} " - text_width = self.width - self.current_indent - - if text_width >= (term_len(usage_prefix) + 20): - # The arguments will fit to the right of the prefix. - indent = " " * term_len(usage_prefix) - self.write( - wrap_text( - args, - text_width, - initial_indent=usage_prefix, - subsequent_indent=indent, - ) - ) - else: - # The prefix is too long, put the arguments on the next line. - self.write(usage_prefix) - self.write("\n") - indent = " " * (max(self.current_indent, term_len(prefix)) + 4) - self.write( - wrap_text( - args, text_width, initial_indent=indent, subsequent_indent=indent - ) - ) - - self.write("\n") - - def write_heading(self, heading: str) -> None: - """Writes a heading into the buffer.""" - self.write(f"{'':>{self.current_indent}}{heading}:\n") - - def write_paragraph(self) -> None: - """Writes a paragraph into the buffer.""" - if self.buffer: - self.write("\n") - - def write_text(self, text: str) -> None: - """Writes re-indented text into the buffer. This rewraps and - preserves paragraphs. - """ - indent = " " * self.current_indent - self.write( - wrap_text( - text, - self.width, - initial_indent=indent, - subsequent_indent=indent, - preserve_paragraphs=True, - ) - ) - self.write("\n") - - def write_dl( - self, - rows: t.Sequence[t.Tuple[str, str]], - col_max: int = 30, - col_spacing: int = 2, - ) -> None: - """Writes a definition list into the buffer. This is how options - and commands are usually formatted. - - :param rows: a list of two item tuples for the terms and values. - :param col_max: the maximum width of the first column. - :param col_spacing: the number of spaces between the first and - second column. - """ - rows = list(rows) - widths = measure_table(rows) - if len(widths) != 2: - raise TypeError("Expected two columns for definition list") - - first_col = min(widths[0], col_max) + col_spacing - - for first, second in iter_rows(rows, len(widths)): - self.write(f"{'':>{self.current_indent}}{first}") - if not second: - self.write("\n") - continue - if term_len(first) <= first_col - col_spacing: - self.write(" " * (first_col - term_len(first))) - else: - self.write("\n") - self.write(" " * (first_col + self.current_indent)) - - text_width = max(self.width - first_col - 2, 10) - wrapped_text = wrap_text(second, text_width, preserve_paragraphs=True) - lines = wrapped_text.splitlines() - - if lines: - self.write(f"{lines[0]}\n") - - for line in lines[1:]: - self.write(f"{'':>{first_col + self.current_indent}}{line}\n") - else: - self.write("\n") - - @contextmanager - def section(self, name: str) -> t.Iterator[None]: - """Helpful context manager that writes a paragraph, a heading, - and the indents. - - :param name: the section name that is written as heading. 
- """ - self.write_paragraph() - self.write_heading(name) - self.indent() - try: - yield - finally: - self.dedent() - - @contextmanager - def indentation(self) -> t.Iterator[None]: - """A context manager that increases the indentation.""" - self.indent() - try: - yield - finally: - self.dedent() - - def getvalue(self) -> str: - """Returns the buffer contents.""" - return "".join(self.buffer) - - -def join_options(options: t.Sequence[str]) -> t.Tuple[str, bool]: - """Given a list of option strings this joins them in the most appropriate - way and returns them in the form ``(formatted_string, - any_prefix_is_slash)`` where the second item in the tuple is a flag that - indicates if any of the option prefixes was a slash. - """ - rv = [] - any_prefix_is_slash = False - - for opt in options: - prefix = split_opt(opt)[0] - - if prefix == "/": - any_prefix_is_slash = True - - rv.append((len(prefix), opt)) - - rv.sort(key=lambda x: x[0]) - return ", ".join(x[1] for x in rv), any_prefix_is_slash diff --git a/myenv/Lib/site-packages/click/globals.py b/myenv/Lib/site-packages/click/globals.py deleted file mode 100644 index 480058f..0000000 --- a/myenv/Lib/site-packages/click/globals.py +++ /dev/null @@ -1,68 +0,0 @@ -import typing as t -from threading import local - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - -_local = local() - - -@t.overload -def get_current_context(silent: "te.Literal[False]" = False) -> "Context": - ... - - -@t.overload -def get_current_context(silent: bool = ...) -> t.Optional["Context"]: - ... - - -def get_current_context(silent: bool = False) -> t.Optional["Context"]: - """Returns the current click context. This can be used as a way to - access the current context object from anywhere. This is a more implicit - alternative to the :func:`pass_context` decorator. This function is - primarily useful for helpers such as :func:`echo` which might be - interested in changing its behavior based on the current context. - - To push the current context, :meth:`Context.scope` can be used. - - .. versionadded:: 5.0 - - :param silent: if set to `True` the return value is `None` if no context - is available. The default behavior is to raise a - :exc:`RuntimeError`. - """ - try: - return t.cast("Context", _local.stack[-1]) - except (AttributeError, IndexError) as e: - if not silent: - raise RuntimeError("There is no active click context.") from e - - return None - - -def push_context(ctx: "Context") -> None: - """Pushes a new context to the current stack.""" - _local.__dict__.setdefault("stack", []).append(ctx) - - -def pop_context() -> None: - """Removes the top level from the stack.""" - _local.stack.pop() - - -def resolve_color_default(color: t.Optional[bool] = None) -> t.Optional[bool]: - """Internal helper to get the default value of the color flag. If a - value is passed it's returned unchanged, otherwise it's looked up from - the current context. 
- """ - if color is not None: - return color - - ctx = get_current_context(silent=True) - - if ctx is not None: - return ctx.color - - return None diff --git a/myenv/Lib/site-packages/click/parser.py b/myenv/Lib/site-packages/click/parser.py deleted file mode 100644 index 5fa7adf..0000000 --- a/myenv/Lib/site-packages/click/parser.py +++ /dev/null @@ -1,529 +0,0 @@ -""" -This module started out as largely a copy paste from the stdlib's -optparse module with the features removed that we do not need from -optparse because we implement them in Click on a higher level (for -instance type handling, help formatting and a lot more). - -The plan is to remove more and more from here over time. - -The reason this is a different module and not optparse from the stdlib -is that there are differences in 2.x and 3.x about the error messages -generated and optparse in the stdlib uses gettext for no good reason -and might cause us issues. - -Click uses parts of optparse written by Gregory P. Ward and maintained -by the Python Software Foundation. This is limited to code in parser.py. - -Copyright 2001-2006 Gregory P. Ward. All rights reserved. -Copyright 2002-2006 Python Software Foundation. All rights reserved. -""" -# This code uses parts of optparse written by Gregory P. Ward and -# maintained by the Python Software Foundation. -# Copyright 2001-2006 Gregory P. Ward -# Copyright 2002-2006 Python Software Foundation -import typing as t -from collections import deque -from gettext import gettext as _ -from gettext import ngettext - -from .exceptions import BadArgumentUsage -from .exceptions import BadOptionUsage -from .exceptions import NoSuchOption -from .exceptions import UsageError - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Argument as CoreArgument - from .core import Context - from .core import Option as CoreOption - from .core import Parameter as CoreParameter - -V = t.TypeVar("V") - -# Sentinel value that indicates an option was passed as a flag without a -# value but is not a flag option. Option.consume_value uses this to -# prompt or use the flag_value. -_flag_needs_value = object() - - -def _unpack_args( - args: t.Sequence[str], nargs_spec: t.Sequence[int] -) -> t.Tuple[t.Sequence[t.Union[str, t.Sequence[t.Optional[str]], None]], t.List[str]]: - """Given an iterable of arguments and an iterable of nargs specifications, - it returns a tuple with all the unpacked arguments at the first index - and all remaining arguments as the second. - - The nargs specification is the number of arguments that should be consumed - or `-1` to indicate that this position should eat up all the remainders. - - Missing items are filled with `None`. - """ - args = deque(args) - nargs_spec = deque(nargs_spec) - rv: t.List[t.Union[str, t.Tuple[t.Optional[str], ...], None]] = [] - spos: t.Optional[int] = None - - def _fetch(c: "te.Deque[V]") -> t.Optional[V]: - try: - if spos is None: - return c.popleft() - else: - return c.pop() - except IndexError: - return None - - while nargs_spec: - nargs = _fetch(nargs_spec) - - if nargs is None: - continue - - if nargs == 1: - rv.append(_fetch(args)) - elif nargs > 1: - x = [_fetch(args) for _ in range(nargs)] - - # If we're reversed, we're pulling in the arguments in reverse, - # so we need to turn them around. - if spos is not None: - x.reverse() - - rv.append(tuple(x)) - elif nargs < 0: - if spos is not None: - raise TypeError("Cannot have two nargs < 0") - - spos = len(rv) - rv.append(None) - - # spos is the position of the wildcard (star). 
If it's not `None`, - # we fill it with the remainder. - if spos is not None: - rv[spos] = tuple(args) - args = [] - rv[spos + 1 :] = reversed(rv[spos + 1 :]) - - return tuple(rv), list(args) - - -def split_opt(opt: str) -> t.Tuple[str, str]: - first = opt[:1] - if first.isalnum(): - return "", opt - if opt[1:2] == first: - return opt[:2], opt[2:] - return first, opt[1:] - - -def normalize_opt(opt: str, ctx: t.Optional["Context"]) -> str: - if ctx is None or ctx.token_normalize_func is None: - return opt - prefix, opt = split_opt(opt) - return f"{prefix}{ctx.token_normalize_func(opt)}" - - -def split_arg_string(string: str) -> t.List[str]: - """Split an argument string as with :func:`shlex.split`, but don't - fail if the string is incomplete. Ignores a missing closing quote or - incomplete escape sequence and uses the partial token as-is. - - .. code-block:: python - - split_arg_string("example 'my file") - ["example", "my file"] - - split_arg_string("example my\\") - ["example", "my"] - - :param string: String to split. - """ - import shlex - - lex = shlex.shlex(string, posix=True) - lex.whitespace_split = True - lex.commenters = "" - out = [] - - try: - for token in lex: - out.append(token) - except ValueError: - # Raised when end-of-string is reached in an invalid state. Use - # the partial token as-is. The quote or escape character is in - # lex.state, not lex.token. - out.append(lex.token) - - return out - - -class Option: - def __init__( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ): - self._short_opts = [] - self._long_opts = [] - self.prefixes: t.Set[str] = set() - - for opt in opts: - prefix, value = split_opt(opt) - if not prefix: - raise ValueError(f"Invalid start character for option ({opt})") - self.prefixes.add(prefix[0]) - if len(prefix) == 1 and len(value) == 1: - self._short_opts.append(opt) - else: - self._long_opts.append(opt) - self.prefixes.add(prefix) - - if action is None: - action = "store" - - self.dest = dest - self.action = action - self.nargs = nargs - self.const = const - self.obj = obj - - @property - def takes_value(self) -> bool: - return self.action in ("store", "append") - - def process(self, value: t.Any, state: "ParsingState") -> None: - if self.action == "store": - state.opts[self.dest] = value # type: ignore - elif self.action == "store_const": - state.opts[self.dest] = self.const # type: ignore - elif self.action == "append": - state.opts.setdefault(self.dest, []).append(value) # type: ignore - elif self.action == "append_const": - state.opts.setdefault(self.dest, []).append(self.const) # type: ignore - elif self.action == "count": - state.opts[self.dest] = state.opts.get(self.dest, 0) + 1 # type: ignore - else: - raise ValueError(f"unknown action '{self.action}'") - state.order.append(self.obj) - - -class Argument: - def __init__(self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1): - self.dest = dest - self.nargs = nargs - self.obj = obj - - def process( - self, - value: t.Union[t.Optional[str], t.Sequence[t.Optional[str]]], - state: "ParsingState", - ) -> None: - if self.nargs > 1: - assert value is not None - holes = sum(1 for x in value if x is None) - if holes == len(value): - value = None - elif holes != 0: - raise BadArgumentUsage( - _("Argument {name!r} takes {nargs} values.").format( - name=self.dest, nargs=self.nargs - ) - ) - - if self.nargs == -1 and self.obj.envvar is not None and value == (): - # 
Replace empty tuple with None so that a value from the - # environment may be tried. - value = None - - state.opts[self.dest] = value # type: ignore - state.order.append(self.obj) - - -class ParsingState: - def __init__(self, rargs: t.List[str]) -> None: - self.opts: t.Dict[str, t.Any] = {} - self.largs: t.List[str] = [] - self.rargs = rargs - self.order: t.List["CoreParameter"] = [] - - -class OptionParser: - """The option parser is an internal class that is ultimately used to - parse options and arguments. It's modelled after optparse and brings - a similar but vastly simplified API. It should generally not be used - directly as the high level Click classes wrap it for you. - - It's not nearly as extensible as optparse or argparse as it does not - implement features that are implemented on a higher level (such as - types or defaults). - - :param ctx: optionally the :class:`~click.Context` where this parser - should go with. - """ - - def __init__(self, ctx: t.Optional["Context"] = None) -> None: - #: The :class:`~click.Context` for this parser. This might be - #: `None` for some advanced use cases. - self.ctx = ctx - #: This controls how the parser deals with interspersed arguments. - #: If this is set to `False`, the parser will stop on the first - #: non-option. Click uses this to implement nested subcommands - #: safely. - self.allow_interspersed_args: bool = True - #: This tells the parser how to deal with unknown options. By - #: default it will error out (which is sensible), but there is a - #: second mode where it will ignore it and continue processing - #: after shifting all the unknown options into the resulting args. - self.ignore_unknown_options: bool = False - - if ctx is not None: - self.allow_interspersed_args = ctx.allow_interspersed_args - self.ignore_unknown_options = ctx.ignore_unknown_options - - self._short_opt: t.Dict[str, Option] = {} - self._long_opt: t.Dict[str, Option] = {} - self._opt_prefixes = {"-", "--"} - self._args: t.List[Argument] = [] - - def add_option( - self, - obj: "CoreOption", - opts: t.Sequence[str], - dest: t.Optional[str], - action: t.Optional[str] = None, - nargs: int = 1, - const: t.Optional[t.Any] = None, - ) -> None: - """Adds a new option named `dest` to the parser. The destination - is not inferred (unlike with optparse) and needs to be explicitly - provided. Action can be any of ``store``, ``store_const``, - ``append``, ``append_const`` or ``count``. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - opts = [normalize_opt(opt, self.ctx) for opt in opts] - option = Option(obj, opts, dest, action=action, nargs=nargs, const=const) - self._opt_prefixes.update(option.prefixes) - for opt in option._short_opts: - self._short_opt[opt] = option - for opt in option._long_opts: - self._long_opt[opt] = option - - def add_argument( - self, obj: "CoreArgument", dest: t.Optional[str], nargs: int = 1 - ) -> None: - """Adds a positional argument named `dest` to the parser. - - The `obj` can be used to identify the option in the order list - that is returned from the parser. - """ - self._args.append(Argument(obj, dest=dest, nargs=nargs)) - - def parse_args( - self, args: t.List[str] - ) -> t.Tuple[t.Dict[str, t.Any], t.List[str], t.List["CoreParameter"]]: - """Parses positional arguments and returns ``(values, args, order)`` - for the parsed options and arguments as well as the leftover - arguments if there are any. The order is a list of objects as they - appear on the command line. 
If arguments appear multiple times they - will be memorized multiple times as well. - """ - state = ParsingState(args) - try: - self._process_args_for_options(state) - self._process_args_for_args(state) - except UsageError: - if self.ctx is None or not self.ctx.resilient_parsing: - raise - return state.opts, state.largs, state.order - - def _process_args_for_args(self, state: ParsingState) -> None: - pargs, args = _unpack_args( - state.largs + state.rargs, [x.nargs for x in self._args] - ) - - for idx, arg in enumerate(self._args): - arg.process(pargs[idx], state) - - state.largs = args - state.rargs = [] - - def _process_args_for_options(self, state: ParsingState) -> None: - while state.rargs: - arg = state.rargs.pop(0) - arglen = len(arg) - # Double dashes always handled explicitly regardless of what - # prefixes are valid. - if arg == "--": - return - elif arg[:1] in self._opt_prefixes and arglen > 1: - self._process_opts(arg, state) - elif self.allow_interspersed_args: - state.largs.append(arg) - else: - state.rargs.insert(0, arg) - return - - # Say this is the original argument list: - # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)] - # ^ - # (we are about to process arg(i)). - # - # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of - # [arg0, ..., arg(i-1)] (any options and their arguments will have - # been removed from largs). - # - # The while loop will usually consume 1 or more arguments per pass. - # If it consumes 1 (eg. arg is an option that takes no arguments), - # then after _process_arg() is done the situation is: - # - # largs = subset of [arg0, ..., arg(i)] - # rargs = [arg(i+1), ..., arg(N-1)] - # - # If allow_interspersed_args is false, largs will always be - # *empty* -- still a subset of [arg0, ..., arg(i-1)], but - # not a very interesting subset! - - def _match_long_opt( - self, opt: str, explicit_value: t.Optional[str], state: ParsingState - ) -> None: - if opt not in self._long_opt: - from difflib import get_close_matches - - possibilities = get_close_matches(opt, self._long_opt) - raise NoSuchOption(opt, possibilities=possibilities, ctx=self.ctx) - - option = self._long_opt[opt] - if option.takes_value: - # At this point it's safe to modify rargs by injecting the - # explicit value, because no exception is raised in this - # branch. This means that the inserted value will be fully - # consumed. - if explicit_value is not None: - state.rargs.insert(0, explicit_value) - - value = self._get_value_from_state(opt, option, state) - - elif explicit_value is not None: - raise BadOptionUsage( - opt, _("Option {name!r} does not take a value.").format(name=opt) - ) - - else: - value = None - - option.process(value, state) - - def _match_short_opt(self, arg: str, state: ParsingState) -> None: - stop = False - i = 1 - prefix = arg[0] - unknown_options = [] - - for ch in arg[1:]: - opt = normalize_opt(f"{prefix}{ch}", self.ctx) - option = self._short_opt.get(opt) - i += 1 - - if not option: - if self.ignore_unknown_options: - unknown_options.append(ch) - continue - raise NoSuchOption(opt, ctx=self.ctx) - if option.takes_value: - # Any characters left in arg? Pretend they're the - # next arg, and stop consuming characters of arg. 
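# Illustration (hypothetical flags): with "-v" a flag and "-o" taking a
# value, "prog -vo out.txt" processes "v", then "o" consumes "out.txt"
# from rargs, while "prog -voout.txt" hits the branch below and re-queues
# "out.txt" as the value for "-o".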
- if i < len(arg): - state.rargs.insert(0, arg[i:]) - stop = True - - value = self._get_value_from_state(opt, option, state) - - else: - value = None - - option.process(value, state) - - if stop: - break - - # If we got any unknown options we recombine the string of the - # remaining options and re-attach the prefix, then report that - # to the state as new larg. This way there is basic combinatorics - # that can be achieved while still ignoring unknown arguments. - if self.ignore_unknown_options and unknown_options: - state.largs.append(f"{prefix}{''.join(unknown_options)}") - - def _get_value_from_state( - self, option_name: str, option: Option, state: ParsingState - ) -> t.Any: - nargs = option.nargs - - if len(state.rargs) < nargs: - if option.obj._flag_needs_value: - # Option allows omitting the value. - value = _flag_needs_value - else: - raise BadOptionUsage( - option_name, - ngettext( - "Option {name!r} requires an argument.", - "Option {name!r} requires {nargs} arguments.", - nargs, - ).format(name=option_name, nargs=nargs), - ) - elif nargs == 1: - next_rarg = state.rargs[0] - - if ( - option.obj._flag_needs_value - and isinstance(next_rarg, str) - and next_rarg[:1] in self._opt_prefixes - and len(next_rarg) > 1 - ): - # The next arg looks like the start of an option, don't - # use it as the value if omitting the value is allowed. - value = _flag_needs_value - else: - value = state.rargs.pop(0) - else: - value = tuple(state.rargs[:nargs]) - del state.rargs[:nargs] - - return value - - def _process_opts(self, arg: str, state: ParsingState) -> None: - explicit_value = None - # Long option handling happens in two parts. The first part is - # supporting explicitly attached values. In any case, we will try - # to long match the option first. - if "=" in arg: - long_opt, explicit_value = arg.split("=", 1) - else: - long_opt = arg - norm_long_opt = normalize_opt(long_opt, self.ctx) - - # At this point we will match the (assumed) long option through - # the long option matching code. Note that this allows options - # like "-foo" to be matched as long options. - try: - self._match_long_opt(norm_long_opt, explicit_value, state) - except NoSuchOption: - # At this point the long option matching failed, and we need - # to try with short options. However there is a special rule - # which says, that if we have a two character options prefix - # (applies to "--foo" for instance), we do not dispatch to the - # short option code and will instead raise the no option - # error. 
- if arg[:2] not in self._opt_prefixes: - self._match_short_opt(arg, state) - return - - if not self.ignore_unknown_options: - raise - - state.largs.append(arg) diff --git a/myenv/Lib/site-packages/click/py.typed b/myenv/Lib/site-packages/click/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/click/shell_completion.py b/myenv/Lib/site-packages/click/shell_completion.py deleted file mode 100644 index dc9e00b..0000000 --- a/myenv/Lib/site-packages/click/shell_completion.py +++ /dev/null @@ -1,596 +0,0 @@ -import os -import re -import typing as t -from gettext import gettext as _ - -from .core import Argument -from .core import BaseCommand -from .core import Context -from .core import MultiCommand -from .core import Option -from .core import Parameter -from .core import ParameterSource -from .parser import split_arg_string -from .utils import echo - - -def shell_complete( - cli: BaseCommand, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - instruction: str, -) -> int: - """Perform shell completion for the given CLI program. - - :param cli: Command being called. - :param ctx_args: Extra arguments to pass to - ``cli.make_context``. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - :param instruction: Value of ``complete_var`` with the completion - instruction and shell, in the form ``instruction_shell``. - :return: Status code to exit with. - """ - shell, _, instruction = instruction.partition("_") - comp_cls = get_completion_class(shell) - - if comp_cls is None: - return 1 - - comp = comp_cls(cli, ctx_args, prog_name, complete_var) - - if instruction == "source": - echo(comp.source()) - return 0 - - if instruction == "complete": - echo(comp.complete()) - return 0 - - return 1 - - -class CompletionItem: - """Represents a completion value and metadata about the value. The - default metadata is ``type`` to indicate special shell handling, - and ``help`` if a shell supports showing a help string next to the - value. - - Arbitrary parameters can be passed when creating the object, and - accessed using ``item.attr``. If an attribute wasn't passed, - accessing it returns ``None``. - - :param value: The completion suggestion. - :param type: Tells the shell script to provide special completion - support for the type. Click uses ``"dir"`` and ``"file"``. - :param help: String shown next to the value if supported. - :param kwargs: Arbitrary metadata. The built-in implementations - don't use this, but custom type completions paired with custom - shell support could use it. - """ - - __slots__ = ("value", "type", "help", "_info") - - def __init__( - self, - value: t.Any, - type: str = "plain", - help: t.Optional[str] = None, - **kwargs: t.Any, - ) -> None: - self.value: t.Any = value - self.type: str = type - self.help: t.Optional[str] = help - self._info = kwargs - - def __getattr__(self, name: str) -> t.Any: - return self._info.get(name) - - -# Only Bash >= 4.4 has the nosort option. 
-_SOURCE_BASH = """\ -%(complete_func)s() { - local IFS=$'\\n' - local response - - response=$(env COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \ -%(complete_var)s=bash_complete $1) - - for completion in $response; do - IFS=',' read type value <<< "$completion" - - if [[ $type == 'dir' ]]; then - COMPREPLY=() - compopt -o dirnames - elif [[ $type == 'file' ]]; then - COMPREPLY=() - compopt -o default - elif [[ $type == 'plain' ]]; then - COMPREPLY+=($value) - fi - done - - return 0 -} - -%(complete_func)s_setup() { - complete -o nosort -F %(complete_func)s %(prog_name)s -} - -%(complete_func)s_setup; -""" - -_SOURCE_ZSH = """\ -#compdef %(prog_name)s - -%(complete_func)s() { - local -a completions - local -a completions_with_descriptions - local -a response - (( ! $+commands[%(prog_name)s] )) && return 1 - - response=("${(@f)$(env COMP_WORDS="${words[*]}" COMP_CWORD=$((CURRENT-1)) \ -%(complete_var)s=zsh_complete %(prog_name)s)}") - - for type key descr in ${response}; do - if [[ "$type" == "plain" ]]; then - if [[ "$descr" == "_" ]]; then - completions+=("$key") - else - completions_with_descriptions+=("$key":"$descr") - fi - elif [[ "$type" == "dir" ]]; then - _path_files -/ - elif [[ "$type" == "file" ]]; then - _path_files -f - fi - done - - if [ -n "$completions_with_descriptions" ]; then - _describe -V unsorted completions_with_descriptions -U - fi - - if [ -n "$completions" ]; then - compadd -U -V unsorted -a completions - fi -} - -if [[ $zsh_eval_context[-1] == loadautofunc ]]; then - # autoload from fpath, call function directly - %(complete_func)s "$@" -else - # eval/source/. command, register function for later - compdef %(complete_func)s %(prog_name)s -fi -""" - -_SOURCE_FISH = """\ -function %(complete_func)s; - set -l response (env %(complete_var)s=fish_complete COMP_WORDS=(commandline -cp) \ -COMP_CWORD=(commandline -t) %(prog_name)s); - - for completion in $response; - set -l metadata (string split "," $completion); - - if test $metadata[1] = "dir"; - __fish_complete_directories $metadata[2]; - else if test $metadata[1] = "file"; - __fish_complete_path $metadata[2]; - else if test $metadata[1] = "plain"; - echo $metadata[2]; - end; - end; -end; - -complete --no-files --command %(prog_name)s --arguments \ -"(%(complete_func)s)"; -""" - - -class ShellComplete: - """Base class for providing shell completion support. A subclass for - a given shell will override attributes and methods to implement the - completion instructions (``source`` and ``complete``). - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param complete_var: Name of the environment variable that holds - the completion instruction. - - .. versionadded:: 8.0 - """ - - name: t.ClassVar[str] - """Name to register the shell as with :func:`add_completion_class`. - This is used in completion instructions (``{name}_source`` and - ``{name}_complete``). - """ - - source_template: t.ClassVar[str] - """Completion script template formatted by :meth:`source`. This must - be provided by subclasses. - """ - - def __init__( - self, - cli: BaseCommand, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - complete_var: str, - ) -> None: - self.cli = cli - self.ctx_args = ctx_args - self.prog_name = prog_name - self.complete_var = complete_var - - @property - def func_name(self) -> str: - """The name of the shell function defined by the completion - script. 
- """ - safe_name = re.sub(r"\W*", "", self.prog_name.replace("-", "_"), flags=re.ASCII) - return f"_{safe_name}_completion" - - def source_vars(self) -> t.Dict[str, t.Any]: - """Vars for formatting :attr:`source_template`. - - By default this provides ``complete_func``, ``complete_var``, - and ``prog_name``. - """ - return { - "complete_func": self.func_name, - "complete_var": self.complete_var, - "prog_name": self.prog_name, - } - - def source(self) -> str: - """Produce the shell script that defines the completion - function. By default this ``%``-style formats - :attr:`source_template` with the dict returned by - :meth:`source_vars`. - """ - return self.source_template % self.source_vars() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - """Use the env vars defined by the shell script to return a - tuple of ``args, incomplete``. This must be implemented by - subclasses. - """ - raise NotImplementedError - - def get_completions( - self, args: t.List[str], incomplete: str - ) -> t.List[CompletionItem]: - """Determine the context and last complete command or parameter - from the complete args. Call that object's ``shell_complete`` - method to get the completions for the incomplete value. - - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - ctx = _resolve_context(self.cli, self.ctx_args, self.prog_name, args) - obj, incomplete = _resolve_incomplete(ctx, args, incomplete) - return obj.shell_complete(ctx, incomplete) - - def format_completion(self, item: CompletionItem) -> str: - """Format a completion item into the form recognized by the - shell script. This must be implemented by subclasses. - - :param item: Completion item to format. - """ - raise NotImplementedError - - def complete(self) -> str: - """Produce the completion data to send back to the shell. - - By default this calls :meth:`get_completion_args`, gets the - completions, then calls :meth:`format_completion` for each - completion. - """ - args, incomplete = self.get_completion_args() - completions = self.get_completions(args, incomplete) - out = [self.format_completion(item) for item in completions] - return "\n".join(out) - - -class BashComplete(ShellComplete): - """Shell completion for Bash.""" - - name = "bash" - source_template = _SOURCE_BASH - - @staticmethod - def _check_version() -> None: - import subprocess - - output = subprocess.run( - ["bash", "-c", 'echo "${BASH_VERSION}"'], stdout=subprocess.PIPE - ) - match = re.search(r"^(\d+)\.(\d+)\.\d+", output.stdout.decode()) - - if match is not None: - major, minor = match.groups() - - if major < "4" or major == "4" and minor < "4": - echo( - _( - "Shell completion is not supported for Bash" - " versions older than 4.4." 
- ), - err=True, - ) - else: - echo( - _("Couldn't detect Bash version, shell completion is not supported."), - err=True, - ) - - def source(self) -> str: - self._check_version() - return super().source() - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type},{item.value}" - - -class ZshComplete(ShellComplete): - """Shell completion for Zsh.""" - - name = "zsh" - source_template = _SOURCE_ZSH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - cword = int(os.environ["COMP_CWORD"]) - args = cwords[1:cword] - - try: - incomplete = cwords[cword] - except IndexError: - incomplete = "" - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - return f"{item.type}\n{item.value}\n{item.help if item.help else '_'}" - - -class FishComplete(ShellComplete): - """Shell completion for Fish.""" - - name = "fish" - source_template = _SOURCE_FISH - - def get_completion_args(self) -> t.Tuple[t.List[str], str]: - cwords = split_arg_string(os.environ["COMP_WORDS"]) - incomplete = os.environ["COMP_CWORD"] - args = cwords[1:] - - # Fish stores the partial word in both COMP_WORDS and - # COMP_CWORD, remove it from complete args. - if incomplete and args and args[-1] == incomplete: - args.pop() - - return args, incomplete - - def format_completion(self, item: CompletionItem) -> str: - if item.help: - return f"{item.type},{item.value}\t{item.help}" - - return f"{item.type},{item.value}" - - -ShellCompleteType = t.TypeVar("ShellCompleteType", bound=t.Type[ShellComplete]) - - -_available_shells: t.Dict[str, t.Type[ShellComplete]] = { - "bash": BashComplete, - "fish": FishComplete, - "zsh": ZshComplete, -} - - -def add_completion_class( - cls: ShellCompleteType, name: t.Optional[str] = None -) -> ShellCompleteType: - """Register a :class:`ShellComplete` subclass under the given name. - The name will be provided by the completion instruction environment - variable during completion. - - :param cls: The completion class that will handle completion for the - shell. - :param name: Name to register the class under. Defaults to the - class's ``name`` attribute. - """ - if name is None: - name = cls.name - - _available_shells[name] = cls - - return cls - - -def get_completion_class(shell: str) -> t.Optional[t.Type[ShellComplete]]: - """Look up a registered :class:`ShellComplete` subclass by the name - provided by the completion instruction environment variable. If the - name isn't registered, returns ``None``. - - :param shell: Name the class is registered under. - """ - return _available_shells.get(shell) - - -def _is_incomplete_argument(ctx: Context, param: Parameter) -> bool: - """Determine if the given parameter is an argument that can still - accept values. - - :param ctx: Invocation context for the command represented by the - parsed complete args. - :param param: Argument object being checked. - """ - if not isinstance(param, Argument): - return False - - assert param.name is not None - # Will be None if expose_value is False. 
- value = ctx.params.get(param.name) - return ( - param.nargs == -1 - or ctx.get_parameter_source(param.name) is not ParameterSource.COMMANDLINE - or ( - param.nargs > 1 - and isinstance(value, (tuple, list)) - and len(value) < param.nargs - ) - ) - - -def _start_of_option(ctx: Context, value: str) -> bool: - """Check if the value looks like the start of an option.""" - if not value: - return False - - c = value[0] - return c in ctx._opt_prefixes - - -def _is_incomplete_option(ctx: Context, args: t.List[str], param: Parameter) -> bool: - """Determine if the given parameter is an option that needs a value. - - :param args: List of complete args before the incomplete value. - :param param: Option object being checked. - """ - if not isinstance(param, Option): - return False - - if param.is_flag or param.count: - return False - - last_option = None - - for index, arg in enumerate(reversed(args)): - if index + 1 > param.nargs: - break - - if _start_of_option(ctx, arg): - last_option = arg - - return last_option is not None and last_option in param.opts - - -def _resolve_context( - cli: BaseCommand, - ctx_args: t.MutableMapping[str, t.Any], - prog_name: str, - args: t.List[str], -) -> Context: - """Produce the context hierarchy starting with the command and - traversing the complete arguments. This only follows the commands, - it doesn't trigger input prompts or callbacks. - - :param cli: Command being called. - :param prog_name: Name of the executable in the shell. - :param args: List of complete args before the incomplete value. - """ - ctx_args["resilient_parsing"] = True - ctx = cli.make_context(prog_name, args.copy(), **ctx_args) - args = ctx.protected_args + ctx.args - - while args: - command = ctx.command - - if isinstance(command, MultiCommand): - if not command.chain: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - ctx = cmd.make_context(name, args, parent=ctx, resilient_parsing=True) - args = ctx.protected_args + ctx.args - else: - sub_ctx = ctx - - while args: - name, cmd, args = command.resolve_command(ctx, args) - - if cmd is None: - return ctx - - sub_ctx = cmd.make_context( - name, - args, - parent=ctx, - allow_extra_args=True, - allow_interspersed_args=False, - resilient_parsing=True, - ) - args = sub_ctx.args - - ctx = sub_ctx - args = [*sub_ctx.protected_args, *sub_ctx.args] - else: - break - - return ctx - - -def _resolve_incomplete( - ctx: Context, args: t.List[str], incomplete: str -) -> t.Tuple[t.Union[BaseCommand, Parameter], str]: - """Find the Click object that will handle the completion of the - incomplete value. Return the object and the incomplete value. - - :param ctx: Invocation context for the command represented by - the parsed complete args. - :param args: List of complete args before the incomplete value. - :param incomplete: Value being completed. May be empty. - """ - # Different shells treat an "=" between a long option name and - # value differently. Might keep the value joined, return the "=" - # as a separate item, or return the split name and value. Always - # split and discard the "=" to make completion easier. - if incomplete == "=": - incomplete = "" - elif "=" in incomplete and _start_of_option(ctx, incomplete): - name, _, incomplete = incomplete.partition("=") - args.append(name) - - # The "--" marker tells Click to stop treating values as options - # even if they start with the option character. 
If it hasn't been - # given and the incomplete arg looks like an option, the current - # command will provide option name completions. - if "--" not in args and _start_of_option(ctx, incomplete): - return ctx.command, incomplete - - params = ctx.command.get_params(ctx) - - # If the last complete arg is an option name with an incomplete - # value, the option will provide value completions. - for param in params: - if _is_incomplete_option(ctx, args, param): - return param, incomplete - - # It's not an option name or value. The first argument without a - # parsed value will provide value completions. - for param in params: - if _is_incomplete_argument(ctx, param): - return param, incomplete - - # There were no unparsed arguments, the command may be a group that - # will provide command name completions. - return ctx.command, incomplete diff --git a/myenv/Lib/site-packages/click/termui.py b/myenv/Lib/site-packages/click/termui.py deleted file mode 100644 index db7a4b2..0000000 --- a/myenv/Lib/site-packages/click/termui.py +++ /dev/null @@ -1,784 +0,0 @@ -import inspect -import io -import itertools -import sys -import typing as t -from gettext import gettext as _ - -from ._compat import isatty -from ._compat import strip_ansi -from .exceptions import Abort -from .exceptions import UsageError -from .globals import resolve_color_default -from .types import Choice -from .types import convert_type -from .types import ParamType -from .utils import echo -from .utils import LazyFile - -if t.TYPE_CHECKING: - from ._termui_impl import ProgressBar - -V = t.TypeVar("V") - -# The prompt functions to use. The doc tools currently override these -# functions to customize how they work. -visible_prompt_func: t.Callable[[str], str] = input - -_ansi_colors = { - "black": 30, - "red": 31, - "green": 32, - "yellow": 33, - "blue": 34, - "magenta": 35, - "cyan": 36, - "white": 37, - "reset": 39, - "bright_black": 90, - "bright_red": 91, - "bright_green": 92, - "bright_yellow": 93, - "bright_blue": 94, - "bright_magenta": 95, - "bright_cyan": 96, - "bright_white": 97, -} -_ansi_reset_all = "\033[0m" - - -def hidden_prompt_func(prompt: str) -> str: - import getpass - - return getpass.getpass(prompt) - - -def _build_prompt( - text: str, - suffix: str, - show_default: bool = False, - default: t.Optional[t.Any] = None, - show_choices: bool = True, - type: t.Optional[ParamType] = None, -) -> str: - prompt = text - if type is not None and show_choices and isinstance(type, Choice): - prompt += f" ({', '.join(map(str, type.choices))})" - if default is not None and show_default: - prompt = f"{prompt} [{_format_default(default)}]" - return f"{prompt}{suffix}" - - -def _format_default(default: t.Any) -> t.Any: - if isinstance(default, (io.IOBase, LazyFile)) and hasattr(default, "name"): - return default.name - - return default - - -def prompt( - text: str, - default: t.Optional[t.Any] = None, - hide_input: bool = False, - confirmation_prompt: t.Union[bool, str] = False, - type: t.Optional[t.Union[ParamType, t.Any]] = None, - value_proc: t.Optional[t.Callable[[str], t.Any]] = None, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, - show_choices: bool = True, -) -> t.Any: - """Prompts a user for input. This is a convenience function that can - be used to prompt a user for input later. - - If the user aborts the input by sending an interrupt signal, this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the text to show for the prompt. 
- :param default: the default value to use if no input happens. If this - is not given it will prompt until it's aborted. - :param hide_input: if this is set to true then the input value will - be hidden. - :param confirmation_prompt: Prompt a second time to confirm the - value. Can be set to a string instead of ``True`` to customize - the message. - :param type: the type to use to check the value against. - :param value_proc: if this parameter is provided it's a function that - is invoked instead of the type conversion to - convert a value. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - :param show_choices: Show or hide choices if the passed type is a Choice. - For example if type is a Choice of either day or week, - show_choices is true and text is "Group by" then the - prompt will be "Group by (day, week): ". - - .. versionadded:: 8.0 - ``confirmation_prompt`` can be a custom string. - - .. versionadded:: 7.0 - Added the ``show_choices`` parameter. - - .. versionadded:: 6.0 - Added unicode support for cmd.exe on Windows. - - .. versionadded:: 4.0 - Added the `err` parameter. - - """ - - def prompt_func(text: str) -> str: - f = hidden_prompt_func if hide_input else visible_prompt_func - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(text.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - return f(" ") - except (KeyboardInterrupt, EOFError): - # getpass doesn't print a newline if the user aborts input with ^C. - # Allegedly this behavior is inherited from getpass(3). - # A doc bug has been filed at https://bugs.python.org/issue24711 - if hide_input: - echo(None, err=err) - raise Abort() from None - - if value_proc is None: - value_proc = convert_type(type, default) - - prompt = _build_prompt( - text, prompt_suffix, show_default, default, show_choices, type - ) - - if confirmation_prompt: - if confirmation_prompt is True: - confirmation_prompt = _("Repeat for confirmation") - - confirmation_prompt = _build_prompt(confirmation_prompt, prompt_suffix) - - while True: - while True: - value = prompt_func(prompt) - if value: - break - elif default is not None: - value = default - break - try: - result = value_proc(value) - except UsageError as e: - if hide_input: - echo(_("Error: The value you entered was invalid."), err=err) - else: - echo(_("Error: {e.message}").format(e=e), err=err) # noqa: B306 - continue - if not confirmation_prompt: - return result - while True: - value2 = prompt_func(confirmation_prompt) - is_empty = not value and not value2 - if value2 or is_empty: - break - if value == value2: - return result - echo(_("Error: The two entered values do not match."), err=err) - - -def confirm( - text: str, - default: t.Optional[bool] = False, - abort: bool = False, - prompt_suffix: str = ": ", - show_default: bool = True, - err: bool = False, -) -> bool: - """Prompts for confirmation (yes/no question). - - If the user aborts the input by sending a interrupt signal this - function will catch it and raise a :exc:`Abort` exception. - - :param text: the question to ask. - :param default: The default value to use when no input is given. If - ``None``, repeat until input is given. 
- :param abort: if this is set to `True` a negative answer aborts the - exception by raising :exc:`Abort`. - :param prompt_suffix: a suffix that should be added to the prompt. - :param show_default: shows or hides the default value in the prompt. - :param err: if set to true the file defaults to ``stderr`` instead of - ``stdout``, the same as with echo. - - .. versionchanged:: 8.0 - Repeat until input is given if ``default`` is ``None``. - - .. versionadded:: 4.0 - Added the ``err`` parameter. - """ - prompt = _build_prompt( - text, - prompt_suffix, - show_default, - "y/n" if default is None else ("Y/n" if default else "y/N"), - ) - - while True: - try: - # Write the prompt separately so that we get nice - # coloring through colorama on Windows - echo(prompt.rstrip(" "), nl=False, err=err) - # Echo a space to stdout to work around an issue where - # readline causes backspace to clear the whole line. - value = visible_prompt_func(" ").lower().strip() - except (KeyboardInterrupt, EOFError): - raise Abort() from None - if value in ("y", "yes"): - rv = True - elif value in ("n", "no"): - rv = False - elif default is not None and value == "": - rv = default - else: - echo(_("Error: invalid input"), err=err) - continue - break - if abort and not rv: - raise Abort() - return rv - - -def echo_via_pager( - text_or_generator: t.Union[t.Iterable[str], t.Callable[[], t.Iterable[str]], str], - color: t.Optional[bool] = None, -) -> None: - """This function takes a text and shows it via an environment specific - pager on stdout. - - .. versionchanged:: 3.0 - Added the `color` flag. - - :param text_or_generator: the text to page, or alternatively, a - generator emitting the text to page. - :param color: controls if the pager supports ANSI colors or not. The - default is autodetection. - """ - color = resolve_color_default(color) - - if inspect.isgeneratorfunction(text_or_generator): - i = t.cast(t.Callable[[], t.Iterable[str]], text_or_generator)() - elif isinstance(text_or_generator, str): - i = [text_or_generator] - else: - i = iter(t.cast(t.Iterable[str], text_or_generator)) - - # convert every element of i to a text type if necessary - text_generator = (el if isinstance(el, str) else str(el) for el in i) - - from ._termui_impl import pager - - return pager(itertools.chain(text_generator, "\n"), color) - - -def progressbar( - iterable: t.Optional[t.Iterable[V]] = None, - length: t.Optional[int] = None, - label: t.Optional[str] = None, - show_eta: bool = True, - show_percent: t.Optional[bool] = None, - show_pos: bool = False, - item_show_func: t.Optional[t.Callable[[t.Optional[V]], t.Optional[str]]] = None, - fill_char: str = "#", - empty_char: str = "-", - bar_template: str = "%(label)s [%(bar)s] %(info)s", - info_sep: str = " ", - width: int = 36, - file: t.Optional[t.TextIO] = None, - color: t.Optional[bool] = None, - update_min_steps: int = 1, -) -> "ProgressBar[V]": - """This function creates an iterable context manager that can be used - to iterate over something while showing a progress bar. It will - either iterate over the `iterable` or `length` items (that are counted - up). While iteration happens, this function will print a rendered - progress bar to the given `file` (defaults to stdout) and will attempt - to calculate remaining time and more. By default, this progress bar - will not be rendered if the file is not a terminal. - - The context manager creates the progress bar. When the context - manager is entered the progress bar is already created. 
With every - iteration over the progress bar, the iterable passed to the bar is - advanced and the bar is updated. When the context manager exits, - a newline is printed and the progress bar is finalized on screen. - - Note: The progress bar is currently designed for use cases where the - total progress can be expected to take at least several seconds. - Because of this, the ProgressBar class object won't display - progress that is considered too fast, and progress where the time - between steps is less than a second. - - No printing must happen or the progress bar will be unintentionally - destroyed. - - Example usage:: - - with progressbar(items) as bar: - for item in bar: - do_something_with(item) - - Alternatively, if no iterable is specified, one can manually update the - progress bar through the `update()` method instead of directly - iterating over the progress bar. The update method accepts the number - of steps to increment the bar with:: - - with progressbar(length=chunks.total_bytes) as bar: - for chunk in chunks: - process_chunk(chunk) - bar.update(chunks.bytes) - - The ``update()`` method also takes an optional value specifying the - ``current_item`` at the new position. This is useful when used - together with ``item_show_func`` to customize the output for each - manual step:: - - with click.progressbar( - length=total_size, - label='Unzipping archive', - item_show_func=lambda a: a.filename - ) as bar: - for archive in zip_file: - archive.extract() - bar.update(archive.size, archive) - - :param iterable: an iterable to iterate over. If not provided the length - is required. - :param length: the number of items to iterate over. By default the - progressbar will attempt to ask the iterator about its - length, which might or might not work. If an iterable is - also provided this parameter can be used to override the - length. If an iterable is not provided the progress bar - will iterate over a range of that length. - :param label: the label to show next to the progress bar. - :param show_eta: enables or disables the estimated time display. This is - automatically disabled if the length cannot be - determined. - :param show_percent: enables or disables the percentage display. The - default is `True` if the iterable has a length or - `False` if not. - :param show_pos: enables or disables the absolute position display. The - default is `False`. - :param item_show_func: A function called with the current item which - can return a string to show next to the progress bar. If the - function returns ``None`` nothing is shown. The current item can - be ``None``, such as when entering and exiting the bar. - :param fill_char: the character to use to show the filled part of the - progress bar. - :param empty_char: the character to use to show the non-filled part of - the progress bar. - :param bar_template: the format string to use as template for the bar. - The parameters in it are ``label`` for the label, - ``bar`` for the progress bar and ``info`` for the - info section. - :param info_sep: the separator between multiple info items (eta etc.) - :param width: the width of the progress bar in characters, 0 means full - terminal width - :param file: The file to write to. If this is not a terminal then - only the label is printed. - :param color: controls if the terminal supports ANSI colors or not. The - default is autodetection. This is only needed if ANSI - codes are included anywhere in the progress bar output - which is not the case by default. 
- :param update_min_steps: Render only when this many updates have - completed. This allows tuning for very fast iterators. - - .. versionchanged:: 8.0 - Output is shown even if execution time is less than 0.5 seconds. - - .. versionchanged:: 8.0 - ``item_show_func`` shows the current item, not the previous one. - - .. versionchanged:: 8.0 - Labels are echoed if the output is not a TTY. Reverts a change - in 7.0 that removed all output. - - .. versionadded:: 8.0 - Added the ``update_min_steps`` parameter. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. Added the ``update`` method to - the object. - - .. versionadded:: 2.0 - """ - from ._termui_impl import ProgressBar - - color = resolve_color_default(color) - return ProgressBar( - iterable=iterable, - length=length, - show_eta=show_eta, - show_percent=show_percent, - show_pos=show_pos, - item_show_func=item_show_func, - fill_char=fill_char, - empty_char=empty_char, - bar_template=bar_template, - info_sep=info_sep, - file=file, - label=label, - width=width, - color=color, - update_min_steps=update_min_steps, - ) - - -def clear() -> None: - """Clears the terminal screen. This will have the effect of clearing - the whole visible space of the terminal and moving the cursor to the - top left. This does not do anything if not connected to a terminal. - - .. versionadded:: 2.0 - """ - if not isatty(sys.stdout): - return - - # ANSI escape \033[2J clears the screen, \033[1;1H moves the cursor - echo("\033[2J\033[1;1H", nl=False) - - -def _interpret_color( - color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0 -) -> str: - if isinstance(color, int): - return f"{38 + offset};5;{color:d}" - - if isinstance(color, (tuple, list)): - r, g, b = color - return f"{38 + offset};2;{r:d};{g:d};{b:d}" - - return str(_ansi_colors[color] + offset) - - -def style( - text: t.Any, - fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None, - bold: t.Optional[bool] = None, - dim: t.Optional[bool] = None, - underline: t.Optional[bool] = None, - overline: t.Optional[bool] = None, - italic: t.Optional[bool] = None, - blink: t.Optional[bool] = None, - reverse: t.Optional[bool] = None, - strikethrough: t.Optional[bool] = None, - reset: bool = True, -) -> str: - """Styles a text with ANSI styles and returns the new string. By - default the styling is self contained which means that at the end - of the string a reset code is issued. This can be prevented by - passing ``reset=False``. - - Examples:: - - click.echo(click.style('Hello World!', fg='green')) - click.echo(click.style('ATTENTION!', blink=True)) - click.echo(click.style('Some things', reverse=True, fg='cyan')) - click.echo(click.style('More colors', fg=(255, 12, 128), bg=117)) - - Supported color names: - - * ``black`` (might be a gray) - * ``red`` - * ``green`` - * ``yellow`` (might be an orange) - * ``blue`` - * ``magenta`` - * ``cyan`` - * ``white`` (might be light gray) - * ``bright_black`` - * ``bright_red`` - * ``bright_green`` - * ``bright_yellow`` - * ``bright_blue`` - * ``bright_magenta`` - * ``bright_cyan`` - * ``bright_white`` - * ``reset`` (reset the color code only) - - If the terminal supports it, color may also be specified as: - - - An integer in the interval [0, 255]. The terminal must support - 8-bit/256-color mode. - - An RGB tuple of three integers in [0, 255]. The terminal must - support 24-bit/true-color mode. 
- - See https://en.wikipedia.org/wiki/ANSI_color and - https://gist.github.com/XVilka/8346728 for more information. - - :param text: the string to style with ansi codes. - :param fg: if provided this will become the foreground color. - :param bg: if provided this will become the background color. - :param bold: if provided this will enable or disable bold mode. - :param dim: if provided this will enable or disable dim mode. This is - badly supported. - :param underline: if provided this will enable or disable underline. - :param overline: if provided this will enable or disable overline. - :param italic: if provided this will enable or disable italic. - :param blink: if provided this will enable or disable blinking. - :param reverse: if provided this will enable or disable inverse - rendering (foreground becomes background and the - other way round). - :param strikethrough: if provided this will enable or disable - striking through text. - :param reset: by default a reset-all code is added at the end of the - string which means that styles do not carry over. This - can be disabled to compose styles. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. - - .. versionchanged:: 8.0 - Added support for 256 and RGB color codes. - - .. versionchanged:: 8.0 - Added the ``strikethrough``, ``italic``, and ``overline`` - parameters. - - .. versionchanged:: 7.0 - Added support for bright colors. - - .. versionadded:: 2.0 - """ - if not isinstance(text, str): - text = str(text) - - bits = [] - - if fg: - try: - bits.append(f"\033[{_interpret_color(fg)}m") - except KeyError: - raise TypeError(f"Unknown color {fg!r}") from None - - if bg: - try: - bits.append(f"\033[{_interpret_color(bg, 10)}m") - except KeyError: - raise TypeError(f"Unknown color {bg!r}") from None - - if bold is not None: - bits.append(f"\033[{1 if bold else 22}m") - if dim is not None: - bits.append(f"\033[{2 if dim else 22}m") - if underline is not None: - bits.append(f"\033[{4 if underline else 24}m") - if overline is not None: - bits.append(f"\033[{53 if overline else 55}m") - if italic is not None: - bits.append(f"\033[{3 if italic else 23}m") - if blink is not None: - bits.append(f"\033[{5 if blink else 25}m") - if reverse is not None: - bits.append(f"\033[{7 if reverse else 27}m") - if strikethrough is not None: - bits.append(f"\033[{9 if strikethrough else 29}m") - bits.append(text) - if reset: - bits.append(_ansi_reset_all) - return "".join(bits) - - -def unstyle(text: str) -> str: - """Removes ANSI styling information from a string. Usually it's not - necessary to use this function as Click's echo function will - automatically remove styling if necessary. - - .. versionadded:: 2.0 - - :param text: the text to remove style information from. - """ - return strip_ansi(text) - - -def secho( - message: t.Optional[t.Any] = None, - file: t.Optional[t.IO[t.AnyStr]] = None, - nl: bool = True, - err: bool = False, - color: t.Optional[bool] = None, - **styles: t.Any, -) -> None: - """This function combines :func:`echo` and :func:`style` into one - call. As such the following two calls are the same:: - - click.secho('Hello World!', fg='green') - click.echo(click.style('Hello World!', fg='green')) - - All keyword arguments are forwarded to the underlying functions - depending on which one they go with. - - Non-string types will be converted to :class:`str`. However, - :class:`bytes` are passed directly to :meth:`echo` without applying - style. 
If you want to style bytes that represent text, call - :meth:`bytes.decode` first. - - .. versionchanged:: 8.0 - A non-string ``message`` is converted to a string. Bytes are - passed through without style applied. - - .. versionadded:: 2.0 - """ - if message is not None and not isinstance(message, (bytes, bytearray)): - message = style(message, **styles) - - return echo(message, file=file, nl=nl, err=err, color=color) - - -def edit( - text: t.Optional[t.AnyStr] = None, - editor: t.Optional[str] = None, - env: t.Optional[t.Mapping[str, str]] = None, - require_save: bool = True, - extension: str = ".txt", - filename: t.Optional[str] = None, -) -> t.Optional[t.AnyStr]: - r"""Edits the given text in the defined editor. If an editor is given - (should be the full path to the executable but the regular operating - system search path is used for finding the executable) it overrides - the detected editor. Optionally, some environment variables can be - used. If the editor is closed without changes, `None` is returned. In - case a file is edited directly the return value is always `None` and - `require_save` and `extension` are ignored. - - If the editor cannot be opened a :exc:`UsageError` is raised. - - Note for Windows: to simplify cross-platform usage, the newlines are - automatically converted from POSIX to Windows and vice versa. As such, - the message here will have ``\n`` as newline markers. - - :param text: the text to edit. - :param editor: optionally the editor to use. Defaults to automatic - detection. - :param env: environment variables to forward to the editor. - :param require_save: if this is true, then not saving in the editor - will make the return value become `None`. - :param extension: the extension to tell the editor about. This defaults - to `.txt` but changing this might change syntax - highlighting. - :param filename: if provided it will edit this file instead of the - provided text contents. It will not use a temporary - file as an indirection in that case. - """ - from ._termui_impl import Editor - - ed = Editor(editor=editor, env=env, require_save=require_save, extension=extension) - - if filename is None: - return ed.edit(text) - - ed.edit_file(filename) - return None - - -def launch(url: str, wait: bool = False, locate: bool = False) -> int: - """This function launches the given URL (or filename) in the default - viewer application for this file type. If this is an executable, it - might launch the executable in a new session. The return value is - the exit code of the launched application. Usually, ``0`` indicates - success. - - Examples:: - - click.launch('https://click.palletsprojects.com/') - click.launch('/my/downloaded/file', locate=True) - - .. versionadded:: 2.0 - - :param url: URL or filename of the thing to launch. - :param wait: Wait for the program to exit before returning. This - only works if the launched program blocks. In particular, - ``xdg-open`` on Linux does not block. - :param locate: if this is set to `True` then instead of launching the - application associated with the URL it will attempt to - launch a file manager with the file located. This - might have weird effects if the URL does not point to - the filesystem. - """ - from ._termui_impl import open_url - - return open_url(url, wait=wait, locate=locate) - - -# If this is provided, getchar() calls into this instead. This is used -# for unittesting purposes. 
-_getchar: t.Optional[t.Callable[[bool], str]] = None - - -def getchar(echo: bool = False) -> str: - """Fetches a single character from the terminal and returns it. This - will always return a unicode character and under certain rare - circumstances this might return more than one character. The - situations which more than one character is returned is when for - whatever reason multiple characters end up in the terminal buffer or - standard input was not actually a terminal. - - Note that this will always read from the terminal, even if something - is piped into the standard input. - - Note for Windows: in rare cases when typing non-ASCII characters, this - function might wait for a second character and then return both at once. - This is because certain Unicode characters look like special-key markers. - - .. versionadded:: 2.0 - - :param echo: if set to `True`, the character read will also show up on - the terminal. The default is to not show it. - """ - global _getchar - - if _getchar is None: - from ._termui_impl import getchar as f - - _getchar = f - - return _getchar(echo) - - -def raw_terminal() -> t.ContextManager[int]: - from ._termui_impl import raw_terminal as f - - return f() - - -def pause(info: t.Optional[str] = None, err: bool = False) -> None: - """This command stops execution and waits for the user to press any - key to continue. This is similar to the Windows batch "pause" - command. If the program is not run through a terminal, this command - will instead do nothing. - - .. versionadded:: 2.0 - - .. versionadded:: 4.0 - Added the `err` parameter. - - :param info: The message to print before pausing. Defaults to - ``"Press any key to continue..."``. - :param err: if set to message goes to ``stderr`` instead of - ``stdout``, the same as with echo. - """ - if not isatty(sys.stdin) or not isatty(sys.stdout): - return - - if info is None: - info = _("Press any key to continue...") - - try: - if info: - echo(info, nl=False, err=err) - try: - getchar() - except (KeyboardInterrupt, EOFError): - pass - finally: - if info: - echo(err=err) diff --git a/myenv/Lib/site-packages/click/testing.py b/myenv/Lib/site-packages/click/testing.py deleted file mode 100644 index e0df0d2..0000000 --- a/myenv/Lib/site-packages/click/testing.py +++ /dev/null @@ -1,479 +0,0 @@ -import contextlib -import io -import os -import shlex -import shutil -import sys -import tempfile -import typing as t -from types import TracebackType - -from . import formatting -from . import termui -from . 
import utils -from ._compat import _find_binary_reader - -if t.TYPE_CHECKING: - from .core import BaseCommand - - -class EchoingStdin: - def __init__(self, input: t.BinaryIO, output: t.BinaryIO) -> None: - self._input = input - self._output = output - self._paused = False - - def __getattr__(self, x: str) -> t.Any: - return getattr(self._input, x) - - def _echo(self, rv: bytes) -> bytes: - if not self._paused: - self._output.write(rv) - - return rv - - def read(self, n: int = -1) -> bytes: - return self._echo(self._input.read(n)) - - def read1(self, n: int = -1) -> bytes: - return self._echo(self._input.read1(n)) # type: ignore - - def readline(self, n: int = -1) -> bytes: - return self._echo(self._input.readline(n)) - - def readlines(self) -> t.List[bytes]: - return [self._echo(x) for x in self._input.readlines()] - - def __iter__(self) -> t.Iterator[bytes]: - return iter(self._echo(x) for x in self._input) - - def __repr__(self) -> str: - return repr(self._input) - - -@contextlib.contextmanager -def _pause_echo(stream: t.Optional[EchoingStdin]) -> t.Iterator[None]: - if stream is None: - yield - else: - stream._paused = True - yield - stream._paused = False - - -class _NamedTextIOWrapper(io.TextIOWrapper): - def __init__( - self, buffer: t.BinaryIO, name: str, mode: str, **kwargs: t.Any - ) -> None: - super().__init__(buffer, **kwargs) - self._name = name - self._mode = mode - - @property - def name(self) -> str: - return self._name - - @property - def mode(self) -> str: - return self._mode - - -def make_input_stream( - input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]], charset: str -) -> t.BinaryIO: - # Is already an input stream. - if hasattr(input, "read"): - rv = _find_binary_reader(t.cast(t.IO[t.Any], input)) - - if rv is not None: - return rv - - raise TypeError("Could not find binary reader for input stream.") - - if input is None: - input = b"" - elif isinstance(input, str): - input = input.encode(charset) - - return io.BytesIO(input) - - -class Result: - """Holds the captured result of an invoked CLI script.""" - - def __init__( - self, - runner: "CliRunner", - stdout_bytes: bytes, - stderr_bytes: t.Optional[bytes], - return_value: t.Any, - exit_code: int, - exception: t.Optional[BaseException], - exc_info: t.Optional[ - t.Tuple[t.Type[BaseException], BaseException, TracebackType] - ] = None, - ): - #: The runner that created the result - self.runner = runner - #: The standard output as bytes. - self.stdout_bytes = stdout_bytes - #: The standard error as bytes, or None if not available - self.stderr_bytes = stderr_bytes - #: The value returned from the invoked command. - #: - #: .. versionadded:: 8.0 - self.return_value = return_value - #: The exit code as integer. - self.exit_code = exit_code - #: The exception that happened if one did. 
- self.exception = exception - #: The traceback - self.exc_info = exc_info - - @property - def output(self) -> str: - """The (standard) output as unicode string.""" - return self.stdout - - @property - def stdout(self) -> str: - """The standard output as unicode string.""" - return self.stdout_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - @property - def stderr(self) -> str: - """The standard error as unicode string.""" - if self.stderr_bytes is None: - raise ValueError("stderr not separately captured") - return self.stderr_bytes.decode(self.runner.charset, "replace").replace( - "\r\n", "\n" - ) - - def __repr__(self) -> str: - exc_str = repr(self.exception) if self.exception else "okay" - return f"<{type(self).__name__} {exc_str}>" - - -class CliRunner: - """The CLI runner provides functionality to invoke a Click command line - script for unittesting purposes in a isolated environment. This only - works in single-threaded systems without any concurrency as it changes the - global interpreter state. - - :param charset: the character set for the input and output data. - :param env: a dictionary with environment variables for overriding. - :param echo_stdin: if this is set to `True`, then reading from stdin writes - to stdout. This is useful for showing examples in - some circumstances. Note that regular prompts - will automatically echo the input. - :param mix_stderr: if this is set to `False`, then stdout and stderr are - preserved as independent streams. This is useful for - Unix-philosophy apps that have predictable stdout and - noisy stderr, such that each may be measured - independently - """ - - def __init__( - self, - charset: str = "utf-8", - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - echo_stdin: bool = False, - mix_stderr: bool = True, - ) -> None: - self.charset = charset - self.env: t.Mapping[str, t.Optional[str]] = env or {} - self.echo_stdin = echo_stdin - self.mix_stderr = mix_stderr - - def get_default_prog_name(self, cli: "BaseCommand") -> str: - """Given a command object it will return the default program name - for it. The default is the `name` attribute or ``"root"`` if not - set. - """ - return cli.name or "root" - - def make_env( - self, overrides: t.Optional[t.Mapping[str, t.Optional[str]]] = None - ) -> t.Mapping[str, t.Optional[str]]: - """Returns the environment overrides for invoking a script.""" - rv = dict(self.env) - if overrides: - rv.update(overrides) - return rv - - @contextlib.contextmanager - def isolation( - self, - input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - color: bool = False, - ) -> t.Iterator[t.Tuple[io.BytesIO, t.Optional[io.BytesIO]]]: - """A context manager that sets up the isolation for invoking of a - command line tool. This sets up stdin with the given input data - and `os.environ` with the overrides from the given dictionary. - This also rebinds some internals in Click to be mocked (like the - prompt functionality). - - This is automatically done in the :meth:`invoke` method. - - :param input: the input stream to put into sys.stdin. - :param env: the environment overrides as dictionary. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionchanged:: 8.0 - ``stderr`` is opened with ``errors="backslashreplace"`` - instead of the default ``"strict"``. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. 
- """ - bytes_input = make_input_stream(input, self.charset) - echo_input = None - - old_stdin = sys.stdin - old_stdout = sys.stdout - old_stderr = sys.stderr - old_forced_width = formatting.FORCED_WIDTH - formatting.FORCED_WIDTH = 80 - - env = self.make_env(env) - - bytes_output = io.BytesIO() - - if self.echo_stdin: - bytes_input = echo_input = t.cast( - t.BinaryIO, EchoingStdin(bytes_input, bytes_output) - ) - - sys.stdin = text_input = _NamedTextIOWrapper( - bytes_input, encoding=self.charset, name="", mode="r" - ) - - if self.echo_stdin: - # Force unbuffered reads, otherwise TextIOWrapper reads a - # large chunk which is echoed early. - text_input._CHUNK_SIZE = 1 # type: ignore - - sys.stdout = _NamedTextIOWrapper( - bytes_output, encoding=self.charset, name="", mode="w" - ) - - bytes_error = None - if self.mix_stderr: - sys.stderr = sys.stdout - else: - bytes_error = io.BytesIO() - sys.stderr = _NamedTextIOWrapper( - bytes_error, - encoding=self.charset, - name="", - mode="w", - errors="backslashreplace", - ) - - @_pause_echo(echo_input) # type: ignore - def visible_input(prompt: t.Optional[str] = None) -> str: - sys.stdout.write(prompt or "") - val = text_input.readline().rstrip("\r\n") - sys.stdout.write(f"{val}\n") - sys.stdout.flush() - return val - - @_pause_echo(echo_input) # type: ignore - def hidden_input(prompt: t.Optional[str] = None) -> str: - sys.stdout.write(f"{prompt or ''}\n") - sys.stdout.flush() - return text_input.readline().rstrip("\r\n") - - @_pause_echo(echo_input) # type: ignore - def _getchar(echo: bool) -> str: - char = sys.stdin.read(1) - - if echo: - sys.stdout.write(char) - - sys.stdout.flush() - return char - - default_color = color - - def should_strip_ansi( - stream: t.Optional[t.IO[t.Any]] = None, color: t.Optional[bool] = None - ) -> bool: - if color is None: - return not default_color - return not color - - old_visible_prompt_func = termui.visible_prompt_func - old_hidden_prompt_func = termui.hidden_prompt_func - old__getchar_func = termui._getchar - old_should_strip_ansi = utils.should_strip_ansi # type: ignore - termui.visible_prompt_func = visible_input - termui.hidden_prompt_func = hidden_input - termui._getchar = _getchar - utils.should_strip_ansi = should_strip_ansi # type: ignore - - old_env = {} - try: - for key, value in env.items(): - old_env[key] = os.environ.get(key) - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - yield (bytes_output, bytes_error) - finally: - for key, value in old_env.items(): - if value is None: - try: - del os.environ[key] - except Exception: - pass - else: - os.environ[key] = value - sys.stdout = old_stdout - sys.stderr = old_stderr - sys.stdin = old_stdin - termui.visible_prompt_func = old_visible_prompt_func - termui.hidden_prompt_func = old_hidden_prompt_func - termui._getchar = old__getchar_func - utils.should_strip_ansi = old_should_strip_ansi # type: ignore - formatting.FORCED_WIDTH = old_forced_width - - def invoke( - self, - cli: "BaseCommand", - args: t.Optional[t.Union[str, t.Sequence[str]]] = None, - input: t.Optional[t.Union[str, bytes, t.IO[t.Any]]] = None, - env: t.Optional[t.Mapping[str, t.Optional[str]]] = None, - catch_exceptions: bool = True, - color: bool = False, - **extra: t.Any, - ) -> Result: - """Invokes a command in an isolated environment. The arguments are - forwarded directly to the command line script, the `extra` keyword - arguments are passed to the :meth:`~clickpkg.Command.main` function of - the command. 
- - This returns a :class:`Result` object. - - :param cli: the command to invoke - :param args: the arguments to invoke. It may be given as an iterable - or a string. When given as string it will be interpreted - as a Unix shell command. More details at - :func:`shlex.split`. - :param input: the input data for `sys.stdin`. - :param env: the environment overrides. - :param catch_exceptions: Whether to catch any other exceptions than - ``SystemExit``. - :param extra: the keyword arguments to pass to :meth:`main`. - :param color: whether the output should contain color codes. The - application can still override this explicitly. - - .. versionchanged:: 8.0 - The result object has the ``return_value`` attribute with - the value returned from the invoked command. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionchanged:: 3.0 - Added the ``catch_exceptions`` parameter. - - .. versionchanged:: 3.0 - The result object has the ``exc_info`` attribute with the - traceback if available. - """ - exc_info = None - with self.isolation(input=input, env=env, color=color) as outstreams: - return_value = None - exception: t.Optional[BaseException] = None - exit_code = 0 - - if isinstance(args, str): - args = shlex.split(args) - - try: - prog_name = extra.pop("prog_name") - except KeyError: - prog_name = self.get_default_prog_name(cli) - - try: - return_value = cli.main(args=args or (), prog_name=prog_name, **extra) - except SystemExit as e: - exc_info = sys.exc_info() - e_code = t.cast(t.Optional[t.Union[int, t.Any]], e.code) - - if e_code is None: - e_code = 0 - - if e_code != 0: - exception = e - - if not isinstance(e_code, int): - sys.stdout.write(str(e_code)) - sys.stdout.write("\n") - e_code = 1 - - exit_code = e_code - - except Exception as e: - if not catch_exceptions: - raise - exception = e - exit_code = 1 - exc_info = sys.exc_info() - finally: - sys.stdout.flush() - stdout = outstreams[0].getvalue() - if self.mix_stderr: - stderr = None - else: - stderr = outstreams[1].getvalue() # type: ignore - - return Result( - runner=self, - stdout_bytes=stdout, - stderr_bytes=stderr, - return_value=return_value, - exit_code=exit_code, - exception=exception, - exc_info=exc_info, # type: ignore - ) - - @contextlib.contextmanager - def isolated_filesystem( - self, temp_dir: t.Optional[t.Union[str, "os.PathLike[str]"]] = None - ) -> t.Iterator[str]: - """A context manager that creates a temporary directory and - changes the current working directory to it. This isolates tests - that affect the contents of the CWD to prevent them from - interfering with each other. - - :param temp_dir: Create the temporary directory under this - directory. If given, the created directory is not removed - when exiting. - - .. versionchanged:: 8.0 - Added the ``temp_dir`` parameter. 
- """ - cwd = os.getcwd() - dt = tempfile.mkdtemp(dir=temp_dir) - os.chdir(dt) - - try: - yield dt - finally: - os.chdir(cwd) - - if temp_dir is None: - try: - shutil.rmtree(dt) - except OSError: # noqa: B014 - pass diff --git a/myenv/Lib/site-packages/click/types.py b/myenv/Lib/site-packages/click/types.py deleted file mode 100644 index 2b1d179..0000000 --- a/myenv/Lib/site-packages/click/types.py +++ /dev/null @@ -1,1089 +0,0 @@ -import os -import stat -import sys -import typing as t -from datetime import datetime -from gettext import gettext as _ -from gettext import ngettext - -from ._compat import _get_argv_encoding -from ._compat import open_stream -from .exceptions import BadParameter -from .utils import format_filename -from .utils import LazyFile -from .utils import safecall - -if t.TYPE_CHECKING: - import typing_extensions as te - from .core import Context - from .core import Parameter - from .shell_completion import CompletionItem - - -class ParamType: - """Represents the type of a parameter. Validates and converts values - from the command line or Python into the correct type. - - To implement a custom type, subclass and implement at least the - following: - - - The :attr:`name` class attribute must be set. - - Calling an instance of the type with ``None`` must return - ``None``. This is already implemented by default. - - :meth:`convert` must convert string values to the correct type. - - :meth:`convert` must accept values that are already the correct - type. - - It must be able to convert a value if the ``ctx`` and ``param`` - arguments are ``None``. This can occur when converting prompt - input. - """ - - is_composite: t.ClassVar[bool] = False - arity: t.ClassVar[int] = 1 - - #: the descriptive name of this type - name: str - - #: if a list of this type is expected and the value is pulled from a - #: string environment variable, this is what splits it up. `None` - #: means any whitespace. For all parameters the general rule is that - #: whitespace splits them up. The exception are paths and files which - #: are split by ``os.path.pathsep`` by default (":" on Unix and ";" on - #: Windows). - envvar_list_splitter: t.ClassVar[t.Optional[str]] = None - - def to_info_dict(self) -> t.Dict[str, t.Any]: - """Gather information that could be useful for a tool generating - user-facing documentation. - - Use :meth:`click.Context.to_info_dict` to traverse the entire - CLI structure. - - .. versionadded:: 8.0 - """ - # The class name without the "ParamType" suffix. - param_type = type(self).__name__.partition("ParamType")[0] - param_type = param_type.partition("ParameterType")[0] - - # Custom subclasses might not remember to set a name. - if hasattr(self, "name"): - name = self.name - else: - name = param_type - - return {"param_type": param_type, "name": name} - - def __call__( - self, - value: t.Any, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> t.Any: - if value is not None: - return self.convert(value, param, ctx) - - def get_metavar(self, param: "Parameter") -> t.Optional[str]: - """Returns the metavar default for this param if it provides one.""" - - def get_missing_message(self, param: "Parameter") -> t.Optional[str]: - """Optionally might return extra information about a missing - parameter. - - .. versionadded:: 2.0 - """ - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - """Convert the value to the correct type. This is not called if - the value is ``None`` (the missing value). 
- - This must accept string values from the command line, as well as - values that are already the correct type. It may also convert - other compatible types. - - The ``param`` and ``ctx`` arguments may be ``None`` in certain - situations, such as when converting prompt input. - - If the value cannot be converted, call :meth:`fail` with a - descriptive message. - - :param value: The value to convert. - :param param: The parameter that is using this type to convert - its value. May be ``None``. - :param ctx: The current context that arrived at this value. May - be ``None``. - """ - return value - - def split_envvar_value(self, rv: str) -> t.Sequence[str]: - """Given a value from an environment variable this splits it up - into small chunks depending on the defined envvar list splitter. - - If the splitter is set to `None`, which means that whitespace splits, - then leading and trailing whitespace is ignored. Otherwise, leading - and trailing splitters usually lead to empty items being included. - """ - return (rv or "").split(self.envvar_list_splitter) - - def fail( - self, - message: str, - param: t.Optional["Parameter"] = None, - ctx: t.Optional["Context"] = None, - ) -> "t.NoReturn": - """Helper method to fail with an invalid value message.""" - raise BadParameter(message, ctx=ctx, param=param) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a list of - :class:`~click.shell_completion.CompletionItem` objects for the - incomplete value. Most types do not provide completions, but - some do, and this allows custom types to provide custom - completions as well. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. 
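The ``ParamType`` contract spelled out above (subclass, set ``name``, make ``convert`` accept both strings and already-converted values, call ``fail`` on bad input) looks roughly like this in practice. ``HexInt`` is a hypothetical example type, not part of Click:

.. code-block:: python

    import click

    class HexInt(click.ParamType):
        """Hypothetical type that parses 'ff'/'0xff'-style hex strings."""

        name = "hex_int"

        def convert(self, value, param, ctx):
            if isinstance(value, int):  # already the correct type
                return value
            try:
                return int(value, 16)
            except ValueError:
                # fail() raises BadParameter with param/ctx attached
                self.fail(f"{value!r} is not a valid hex integer.", param, ctx)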
versionadded:: 8.0 - """ - return [] - - -class CompositeParamType(ParamType): - is_composite = True - - @property - def arity(self) -> int: # type: ignore - raise NotImplementedError() - - -class FuncParamType(ParamType): - def __init__(self, func: t.Callable[[t.Any], t.Any]) -> None: - self.name: str = func.__name__ - self.func = func - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["func"] = self.func - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self.func(value) - except ValueError: - try: - value = str(value) - except UnicodeError: - value = value.decode("utf-8", "replace") - - self.fail(value, param, ctx) - - -class UnprocessedParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - return value - - def __repr__(self) -> str: - return "UNPROCESSED" - - -class StringParamType(ParamType): - name = "text" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, bytes): - enc = _get_argv_encoding() - try: - value = value.decode(enc) - except UnicodeError: - fs_enc = sys.getfilesystemencoding() - if fs_enc != enc: - try: - value = value.decode(fs_enc) - except UnicodeError: - value = value.decode("utf-8", "replace") - else: - value = value.decode("utf-8", "replace") - return value - return str(value) - - def __repr__(self) -> str: - return "STRING" - - -class Choice(ParamType): - """The choice type allows a value to be checked against a fixed set - of supported values. All of these values have to be strings. - - You should only pass a list or tuple of choices. Other iterables - (like generators) may lead to surprising results. - - The resulting value will always be one of the originally passed choices - regardless of ``case_sensitive`` or any ``ctx.token_normalize_func`` - being specified. - - See :ref:`choice-opts` for an example. - - :param case_sensitive: Set to false to make choices case - insensitive. Defaults to true. - """ - - name = "choice" - - def __init__(self, choices: t.Sequence[str], case_sensitive: bool = True) -> None: - self.choices = choices - self.case_sensitive = case_sensitive - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["choices"] = self.choices - info_dict["case_sensitive"] = self.case_sensitive - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - choices_str = "|".join(self.choices) - - # Use curly braces to indicate a required argument. - if param.required and param.param_type_name == "argument": - return f"{{{choices_str}}}" - - # Use square braces to indicate an option or optional argument. 
- return f"[{choices_str}]" - - def get_missing_message(self, param: "Parameter") -> str: - return _("Choose from:\n\t{choices}").format(choices=",\n\t".join(self.choices)) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - # Match through normalization and case sensitivity - # first do token_normalize_func, then lowercase - # preserve original `value` to produce an accurate message in - # `self.fail` - normed_value = value - normed_choices = {choice: choice for choice in self.choices} - - if ctx is not None and ctx.token_normalize_func is not None: - normed_value = ctx.token_normalize_func(value) - normed_choices = { - ctx.token_normalize_func(normed_choice): original - for normed_choice, original in normed_choices.items() - } - - if not self.case_sensitive: - normed_value = normed_value.casefold() - normed_choices = { - normed_choice.casefold(): original - for normed_choice, original in normed_choices.items() - } - - if normed_value in normed_choices: - return normed_choices[normed_value] - - choices_str = ", ".join(map(repr, self.choices)) - self.fail( - ngettext( - "{value!r} is not {choice}.", - "{value!r} is not one of {choices}.", - len(self.choices), - ).format(value=value, choice=choices_str, choices=choices_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return f"Choice({list(self.choices)})" - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Complete choices that start with the incomplete value. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - str_choices = map(str, self.choices) - - if self.case_sensitive: - matched = (c for c in str_choices if c.startswith(incomplete)) - else: - incomplete = incomplete.lower() - matched = (c for c in str_choices if c.lower().startswith(incomplete)) - - return [CompletionItem(c) for c in matched] - - -class DateTime(ParamType): - """The DateTime type converts date strings into `datetime` objects. - - The format strings which are checked are configurable, but default to some - common (non-timezone aware) ISO 8601 formats. - - When specifying *DateTime* formats, you should only pass a list or a tuple. - Other iterables, like generators, may lead to surprising results. - - The format strings are processed using ``datetime.strptime``, and this - consequently defines the format strings which are allowed. - - Parsing is tried using each format, in order, and the first format which - parses successfully is used. - - :param formats: A list or tuple of date format strings, in the order in - which they should be tried. Defaults to - ``'%Y-%m-%d'``, ``'%Y-%m-%dT%H:%M:%S'``, - ``'%Y-%m-%d %H:%M:%S'``. 
- """ - - name = "datetime" - - def __init__(self, formats: t.Optional[t.Sequence[str]] = None): - self.formats: t.Sequence[str] = formats or [ - "%Y-%m-%d", - "%Y-%m-%dT%H:%M:%S", - "%Y-%m-%d %H:%M:%S", - ] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["formats"] = self.formats - return info_dict - - def get_metavar(self, param: "Parameter") -> str: - return f"[{'|'.join(self.formats)}]" - - def _try_to_convert_date(self, value: t.Any, format: str) -> t.Optional[datetime]: - try: - return datetime.strptime(value, format) - except ValueError: - return None - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if isinstance(value, datetime): - return value - - for format in self.formats: - converted = self._try_to_convert_date(value, format) - - if converted is not None: - return converted - - formats_str = ", ".join(map(repr, self.formats)) - self.fail( - ngettext( - "{value!r} does not match the format {format}.", - "{value!r} does not match the formats {formats}.", - len(self.formats), - ).format(value=value, format=formats_str, formats=formats_str), - param, - ctx, - ) - - def __repr__(self) -> str: - return "DateTime" - - -class _NumberParamTypeBase(ParamType): - _number_class: t.ClassVar[t.Type[t.Any]] - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - try: - return self._number_class(value) - except ValueError: - self.fail( - _("{value!r} is not a valid {number_type}.").format( - value=value, number_type=self.name - ), - param, - ctx, - ) - - -class _NumberRangeBase(_NumberParamTypeBase): - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - self.min = min - self.max = max - self.min_open = min_open - self.max_open = max_open - self.clamp = clamp - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - min=self.min, - max=self.max, - min_open=self.min_open, - max_open=self.max_open, - clamp=self.clamp, - ) - return info_dict - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import operator - - rv = super().convert(value, param, ctx) - lt_min: bool = self.min is not None and ( - operator.le if self.min_open else operator.lt - )(rv, self.min) - gt_max: bool = self.max is not None and ( - operator.ge if self.max_open else operator.gt - )(rv, self.max) - - if self.clamp: - if lt_min: - return self._clamp(self.min, 1, self.min_open) # type: ignore - - if gt_max: - return self._clamp(self.max, -1, self.max_open) # type: ignore - - if lt_min or gt_max: - self.fail( - _("{value} is not in the range {range}.").format( - value=rv, range=self._describe_range() - ), - param, - ctx, - ) - - return rv - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - """Find the valid value to clamp to bound in the given - direction. - - :param bound: The boundary value. - :param dir: 1 or -1 indicating the direction to move. - :param open: If true, the range does not include the bound. 
- """ - raise NotImplementedError - - def _describe_range(self) -> str: - """Describe the range for use in help text.""" - if self.min is None: - op = "<" if self.max_open else "<=" - return f"x{op}{self.max}" - - if self.max is None: - op = ">" if self.min_open else ">=" - return f"x{op}{self.min}" - - lop = "<" if self.min_open else "<=" - rop = "<" if self.max_open else "<=" - return f"{self.min}{lop}x{rop}{self.max}" - - def __repr__(self) -> str: - clamp = " clamped" if self.clamp else "" - return f"<{type(self).__name__} {self._describe_range()}{clamp}>" - - -class IntParamType(_NumberParamTypeBase): - name = "integer" - _number_class = int - - def __repr__(self) -> str: - return "INT" - - -class IntRange(_NumberRangeBase, IntParamType): - """Restrict an :data:`click.INT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "integer range" - - def _clamp( # type: ignore - self, bound: int, dir: "te.Literal[1, -1]", open: bool - ) -> int: - if not open: - return bound - - return bound + dir - - -class FloatParamType(_NumberParamTypeBase): - name = "float" - _number_class = float - - def __repr__(self) -> str: - return "FLOAT" - - -class FloatRange(_NumberRangeBase, FloatParamType): - """Restrict a :data:`click.FLOAT` value to a range of accepted - values. See :ref:`ranges`. - - If ``min`` or ``max`` are not passed, any value is accepted in that - direction. If ``min_open`` or ``max_open`` are enabled, the - corresponding boundary is not included in the range. - - If ``clamp`` is enabled, a value outside the range is clamped to the - boundary instead of failing. This is not supported if either - boundary is marked ``open``. - - .. versionchanged:: 8.0 - Added the ``min_open`` and ``max_open`` parameters. - """ - - name = "float range" - - def __init__( - self, - min: t.Optional[float] = None, - max: t.Optional[float] = None, - min_open: bool = False, - max_open: bool = False, - clamp: bool = False, - ) -> None: - super().__init__( - min=min, max=max, min_open=min_open, max_open=max_open, clamp=clamp - ) - - if (min_open or max_open) and clamp: - raise TypeError("Clamping is not supported for open bounds.") - - def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float: - if not open: - return bound - - # Could use Python 3.9's math.nextafter here, but clamping an - # open float range doesn't seem to be particularly useful. It's - # left up to the user to write a callback to do it if needed. 
- raise RuntimeError("Clamping is not supported for open bounds.") - - -class BoolParamType(ParamType): - name = "boolean" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - if value in {False, True}: - return bool(value) - - norm = value.strip().lower() - - if norm in {"1", "true", "t", "yes", "y", "on"}: - return True - - if norm in {"0", "false", "f", "no", "n", "off"}: - return False - - self.fail( - _("{value!r} is not a valid boolean.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "BOOL" - - -class UUIDParameterType(ParamType): - name = "uuid" - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - import uuid - - if isinstance(value, uuid.UUID): - return value - - value = value.strip() - - try: - return uuid.UUID(value) - except ValueError: - self.fail( - _("{value!r} is not a valid UUID.").format(value=value), param, ctx - ) - - def __repr__(self) -> str: - return "UUID" - - -class File(ParamType): - """Declares a parameter to be a file for reading or writing. The file - is automatically closed once the context tears down (after the command - finished working). - - Files can be opened for reading or writing. The special value ``-`` - indicates stdin or stdout depending on the mode. - - By default, the file is opened for reading text data, but it can also be - opened in binary mode or for writing. The encoding parameter can be used - to force a specific encoding. - - The `lazy` flag controls if the file should be opened immediately or upon - first IO. The default is to be non-lazy for standard input and output - streams as well as files opened for reading, `lazy` otherwise. When opening a - file lazily for reading, it is still opened temporarily for validation, but - will not be held open until first IO. lazy is mainly useful when opening - for writing to avoid creating the file until it is needed. - - Starting with Click 2.0, files can also be opened atomically in which - case all writes go into a separate file in the same folder and upon - completion the file will be moved over to the original location. This - is useful if a file regularly read by other users is modified. - - See :ref:`file-args` for more information. 
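A minimal sketch of the ``File`` type described above: ``lazy`` and ``atomic`` combine so the destination is only created, and atomically replaced, when actually written. The command and file arguments are illustrative:

.. code-block:: python

    import click

    @click.command()
    @click.argument("src", type=click.File("r"))
    @click.argument("dst", type=click.File("w", lazy=True, atomic=True))
    def copy(src, dst):
        # dst is a LazyFile: created on first write, swapped into place on close
        dst.write(src.read())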
- """ - - name = "filename" - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: t.Optional[bool] = None, - atomic: bool = False, - ) -> None: - self.mode = mode - self.encoding = encoding - self.errors = errors - self.lazy = lazy - self.atomic = atomic - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update(mode=self.mode, encoding=self.encoding) - return info_dict - - def resolve_lazy_flag(self, value: "t.Union[str, os.PathLike[str]]") -> bool: - if self.lazy is not None: - return self.lazy - if os.fspath(value) == "-": - return False - elif "w" in self.mode: - return True - return False - - def convert( - self, - value: t.Union[str, "os.PathLike[str]", t.IO[t.Any]], - param: t.Optional["Parameter"], - ctx: t.Optional["Context"], - ) -> t.IO[t.Any]: - if _is_file_like(value): - return value - - value = t.cast("t.Union[str, os.PathLike[str]]", value) - - try: - lazy = self.resolve_lazy_flag(value) - - if lazy: - lf = LazyFile( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - if ctx is not None: - ctx.call_on_close(lf.close_intelligently) - - return t.cast(t.IO[t.Any], lf) - - f, should_close = open_stream( - value, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - - # If a context is provided, we automatically close the file - # at the end of the context execution (or flush out). If a - # context does not exist, it's the caller's responsibility to - # properly close the file. This for instance happens when the - # type is used with prompts. - if ctx is not None: - if should_close: - ctx.call_on_close(safecall(f.close)) - else: - ctx.call_on_close(safecall(f.flush)) - - return f - except OSError as e: # noqa: B014 - self.fail(f"'{format_filename(value)}': {e.strerror}", param, ctx) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide file path completions. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - return [CompletionItem(incomplete, type="file")] - - -def _is_file_like(value: t.Any) -> "te.TypeGuard[t.IO[t.Any]]": - return hasattr(value, "read") or hasattr(value, "write") - - -class Path(ParamType): - """The ``Path`` type is similar to the :class:`File` type, but - returns the filename instead of an open file. Various checks can be - enabled to validate the type of file and permissions. - - :param exists: The file or directory needs to exist for the value to - be valid. If this is not set to ``True``, and the file does not - exist, then all further checks are silently skipped. - :param file_okay: Allow a file as a value. - :param dir_okay: Allow a directory as a value. - :param readable: if true, a readable check is performed. - :param writable: if true, a writable check is performed. - :param executable: if true, an executable check is performed. - :param resolve_path: Make the value absolute and resolve any - symlinks. A ``~`` is not expanded, as this is supposed to be - done by the shell only. 
- :param allow_dash: Allow a single dash as a value, which indicates - a standard stream (but does not open it). Use - :func:`~click.open_file` to handle opening this value. - :param path_type: Convert the incoming path value to this type. If - ``None``, keep Python's default, which is ``str``. Useful to - convert to :class:`pathlib.Path`. - - .. versionchanged:: 8.1 - Added the ``executable`` parameter. - - .. versionchanged:: 8.0 - Allow passing ``path_type=pathlib.Path``. - - .. versionchanged:: 6.0 - Added the ``allow_dash`` parameter. - """ - - envvar_list_splitter: t.ClassVar[str] = os.path.pathsep - - def __init__( - self, - exists: bool = False, - file_okay: bool = True, - dir_okay: bool = True, - writable: bool = False, - readable: bool = True, - resolve_path: bool = False, - allow_dash: bool = False, - path_type: t.Optional[t.Type[t.Any]] = None, - executable: bool = False, - ): - self.exists = exists - self.file_okay = file_okay - self.dir_okay = dir_okay - self.readable = readable - self.writable = writable - self.executable = executable - self.resolve_path = resolve_path - self.allow_dash = allow_dash - self.type = path_type - - if self.file_okay and not self.dir_okay: - self.name: str = _("file") - elif self.dir_okay and not self.file_okay: - self.name = _("directory") - else: - self.name = _("path") - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict.update( - exists=self.exists, - file_okay=self.file_okay, - dir_okay=self.dir_okay, - writable=self.writable, - readable=self.readable, - allow_dash=self.allow_dash, - ) - return info_dict - - def coerce_path_result( - self, value: "t.Union[str, os.PathLike[str]]" - ) -> "t.Union[str, bytes, os.PathLike[str]]": - if self.type is not None and not isinstance(value, self.type): - if self.type is str: - return os.fsdecode(value) - elif self.type is bytes: - return os.fsencode(value) - else: - return t.cast("os.PathLike[str]", self.type(value)) - - return value - - def convert( - self, - value: "t.Union[str, os.PathLike[str]]", - param: t.Optional["Parameter"], - ctx: t.Optional["Context"], - ) -> "t.Union[str, bytes, os.PathLike[str]]": - rv = value - - is_dash = self.file_okay and self.allow_dash and rv in (b"-", "-") - - if not is_dash: - if self.resolve_path: - # os.path.realpath doesn't resolve symlinks on Windows - # until Python 3.8. Use pathlib for now. 
- import pathlib - - rv = os.fsdecode(pathlib.Path(rv).resolve()) - - try: - st = os.stat(rv) - except OSError: - if not self.exists: - return self.coerce_path_result(rv) - self.fail( - _("{name} {filename!r} does not exist.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if not self.file_okay and stat.S_ISREG(st.st_mode): - self.fail( - _("{name} {filename!r} is a file.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - if not self.dir_okay and stat.S_ISDIR(st.st_mode): - self.fail( - _("{name} '{filename}' is a directory.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.readable and not os.access(rv, os.R_OK): - self.fail( - _("{name} {filename!r} is not readable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.writable and not os.access(rv, os.W_OK): - self.fail( - _("{name} {filename!r} is not writable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - if self.executable and not os.access(value, os.X_OK): - self.fail( - _("{name} {filename!r} is not executable.").format( - name=self.name.title(), filename=format_filename(value) - ), - param, - ctx, - ) - - return self.coerce_path_result(rv) - - def shell_complete( - self, ctx: "Context", param: "Parameter", incomplete: str - ) -> t.List["CompletionItem"]: - """Return a special completion marker that tells the completion - system to use the shell to provide path completions for only - directories or any paths. - - :param ctx: Invocation context for this command. - :param param: The parameter that is requesting completion. - :param incomplete: Value being completed. May be empty. - - .. versionadded:: 8.0 - """ - from click.shell_completion import CompletionItem - - type = "dir" if self.dir_okay and not self.file_okay else "file" - return [CompletionItem(incomplete, type=type)] - - -class Tuple(CompositeParamType): - """The default behavior of Click is to apply a type on a value directly. - This works well in most cases, except for when `nargs` is set to a fixed - count and different types should be used for different items. In this - case the :class:`Tuple` type can be used. This type can only be used - if `nargs` is set to a fixed number. - - For more information see :ref:`tuple-type`. - - This can be selected by using a Python tuple literal as a type. - - :param types: a list of types that should be used for the tuple items. 
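A sketch of the ``Path`` checks above, using ``path_type`` to get a ``pathlib.Path`` back; the ``config`` argument is illustrative:

.. code-block:: python

    import pathlib
    import click

    @click.command()
    @click.argument(
        "config",
        type=click.Path(exists=True, dir_okay=False, path_type=pathlib.Path),
    )
    def show(config):
        # config is a pathlib.Path guaranteed to exist and not be a directory
        click.echo(config.read_text())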
- """ - - def __init__(self, types: t.Sequence[t.Union[t.Type[t.Any], ParamType]]) -> None: - self.types: t.Sequence[ParamType] = [convert_type(ty) for ty in types] - - def to_info_dict(self) -> t.Dict[str, t.Any]: - info_dict = super().to_info_dict() - info_dict["types"] = [t.to_info_dict() for t in self.types] - return info_dict - - @property - def name(self) -> str: # type: ignore - return f"<{' '.join(ty.name for ty in self.types)}>" - - @property - def arity(self) -> int: # type: ignore - return len(self.types) - - def convert( - self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"] - ) -> t.Any: - len_type = len(self.types) - len_value = len(value) - - if len_value != len_type: - self.fail( - ngettext( - "{len_type} values are required, but {len_value} was given.", - "{len_type} values are required, but {len_value} were given.", - len_value, - ).format(len_type=len_type, len_value=len_value), - param=param, - ctx=ctx, - ) - - return tuple(ty(x, param, ctx) for ty, x in zip(self.types, value)) - - -def convert_type(ty: t.Optional[t.Any], default: t.Optional[t.Any] = None) -> ParamType: - """Find the most appropriate :class:`ParamType` for the given Python - type. If the type isn't provided, it can be inferred from a default - value. - """ - guessed_type = False - - if ty is None and default is not None: - if isinstance(default, (tuple, list)): - # If the default is empty, ty will remain None and will - # return STRING. - if default: - item = default[0] - - # A tuple of tuples needs to detect the inner types. - # Can't call convert recursively because that would - # incorrectly unwind the tuple to a single type. - if isinstance(item, (tuple, list)): - ty = tuple(map(type, item)) - else: - ty = type(item) - else: - ty = type(default) - - guessed_type = True - - if isinstance(ty, tuple): - return Tuple(ty) - - if isinstance(ty, ParamType): - return ty - - if ty is str or ty is None: - return STRING - - if ty is int: - return INT - - if ty is float: - return FLOAT - - if ty is bool: - return BOOL - - if guessed_type: - return STRING - - if __debug__: - try: - if issubclass(ty, ParamType): - raise AssertionError( - f"Attempted to use an uninstantiated parameter type ({ty})." - ) - except TypeError: - # ty is an instance (correct), so issubclass fails. - pass - - return FuncParamType(ty) - - -#: A dummy parameter type that just does nothing. From a user's -#: perspective this appears to just be the same as `STRING` but -#: internally no string conversion takes place if the input was bytes. -#: This is usually useful when working with file paths as they can -#: appear in bytes and unicode. -#: -#: For path related uses the :class:`Path` type is a better choice but -#: there are situations where an unprocessed type is useful which is why -#: it is is provided. -#: -#: .. versionadded:: 4.0 -UNPROCESSED = UnprocessedParamType() - -#: A unicode string parameter type which is the implicit default. This -#: can also be selected by using ``str`` as type. -STRING = StringParamType() - -#: An integer parameter. This can also be selected by using ``int`` as -#: type. -INT = IntParamType() - -#: A floating point value parameter. This can also be selected by using -#: ``float`` as type. -FLOAT = FloatParamType() - -#: A boolean parameter. This is the default for boolean flags. This can -#: also be selected by using ``bool`` as a type. -BOOL = BoolParamType() - -#: A UUID parameter. 
-UUID = UUIDParameterType() diff --git a/myenv/Lib/site-packages/click/utils.py b/myenv/Lib/site-packages/click/utils.py deleted file mode 100644 index d536434..0000000 --- a/myenv/Lib/site-packages/click/utils.py +++ /dev/null @@ -1,624 +0,0 @@ -import os -import re -import sys -import typing as t -from functools import update_wrapper -from types import ModuleType -from types import TracebackType - -from ._compat import _default_text_stderr -from ._compat import _default_text_stdout -from ._compat import _find_binary_writer -from ._compat import auto_wrap_for_ansi -from ._compat import binary_streams -from ._compat import open_stream -from ._compat import should_strip_ansi -from ._compat import strip_ansi -from ._compat import text_streams -from ._compat import WIN -from .globals import resolve_color_default - -if t.TYPE_CHECKING: - import typing_extensions as te - - P = te.ParamSpec("P") - -R = t.TypeVar("R") - - -def _posixify(name: str) -> str: - return "-".join(name.split()).lower() - - -def safecall(func: "t.Callable[P, R]") -> "t.Callable[P, t.Optional[R]]": - """Wraps a function so that it swallows exceptions.""" - - def wrapper(*args: "P.args", **kwargs: "P.kwargs") -> t.Optional[R]: - try: - return func(*args, **kwargs) - except Exception: - pass - return None - - return update_wrapper(wrapper, func) - - -def make_str(value: t.Any) -> str: - """Converts a value into a valid string.""" - if isinstance(value, bytes): - try: - return value.decode(sys.getfilesystemencoding()) - except UnicodeError: - return value.decode("utf-8", "replace") - return str(value) - - -def make_default_short_help(help: str, max_length: int = 45) -> str: - """Returns a condensed version of help string.""" - # Consider only the first paragraph. - paragraph_end = help.find("\n\n") - - if paragraph_end != -1: - help = help[:paragraph_end] - - # Collapse newlines, tabs, and spaces. - words = help.split() - - if not words: - return "" - - # The first paragraph started with a "no rewrap" marker, ignore it. - if words[0] == "\b": - words = words[1:] - - total_length = 0 - last_index = len(words) - 1 - - for i, word in enumerate(words): - total_length += len(word) + (i > 0) - - if total_length > max_length: # too long, truncate - break - - if word[-1] == ".": # sentence end, truncate without "..." - return " ".join(words[: i + 1]) - - if total_length == max_length and i != last_index: - break # not at sentence end, truncate with "..." - else: - return " ".join(words) # no truncation needed - - # Account for the length of the suffix. - total_length += len("...") - - # remove words until the length is short enough - while i > 0: - total_length -= len(words[i]) + (i > 0) - - if total_length <= max_length: - break - - i -= 1 - - return " ".join(words[:i]) + "..." - - -class LazyFile: - """A lazy file works like a regular file but it does not fully open - the file but it does perform some basic checks early to see if the - filename parameter does make sense. This is useful for safely opening - files for writing. 
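``LazyFile`` is an internal helper, so treat this as an illustrative sketch rather than supported API: opening is deferred until the first attribute access, via the ``__getattr__`` proxy below.

.. code-block:: python

    from click.utils import LazyFile

    lf = LazyFile("report.txt", "w")   # name checked, file not created yet
    lf.write("generated on demand\n")  # first attribute access opens it for real
    lf.close_intelligently()           # closes only if the wrapper opened the file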
- """ - - def __init__( - self, - filename: t.Union[str, "os.PathLike[str]"], - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - atomic: bool = False, - ): - self.name: str = os.fspath(filename) - self.mode = mode - self.encoding = encoding - self.errors = errors - self.atomic = atomic - self._f: t.Optional[t.IO[t.Any]] - self.should_close: bool - - if self.name == "-": - self._f, self.should_close = open_stream(filename, mode, encoding, errors) - else: - if "r" in mode: - # Open and close the file in case we're opening it for - # reading so that we can catch at least some errors in - # some cases early. - open(filename, mode).close() - self._f = None - self.should_close = True - - def __getattr__(self, name: str) -> t.Any: - return getattr(self.open(), name) - - def __repr__(self) -> str: - if self._f is not None: - return repr(self._f) - return f"" - - def open(self) -> t.IO[t.Any]: - """Opens the file if it's not yet open. This call might fail with - a :exc:`FileError`. Not handling this error will produce an error - that Click shows. - """ - if self._f is not None: - return self._f - try: - rv, self.should_close = open_stream( - self.name, self.mode, self.encoding, self.errors, atomic=self.atomic - ) - except OSError as e: # noqa: E402 - from .exceptions import FileError - - raise FileError(self.name, hint=e.strerror) from e - self._f = rv - return rv - - def close(self) -> None: - """Closes the underlying file, no matter what.""" - if self._f is not None: - self._f.close() - - def close_intelligently(self) -> None: - """This function only closes the file if it was opened by the lazy - file wrapper. For instance this will never close stdin. - """ - if self.should_close: - self.close() - - def __enter__(self) -> "LazyFile": - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - self.close_intelligently() - - def __iter__(self) -> t.Iterator[t.AnyStr]: - self.open() - return iter(self._f) # type: ignore - - -class KeepOpenFile: - def __init__(self, file: t.IO[t.Any]) -> None: - self._file: t.IO[t.Any] = file - - def __getattr__(self, name: str) -> t.Any: - return getattr(self._file, name) - - def __enter__(self) -> "KeepOpenFile": - return self - - def __exit__( - self, - exc_type: t.Optional[t.Type[BaseException]], - exc_value: t.Optional[BaseException], - tb: t.Optional[TracebackType], - ) -> None: - pass - - def __repr__(self) -> str: - return repr(self._file) - - def __iter__(self) -> t.Iterator[t.AnyStr]: - return iter(self._file) - - -def echo( - message: t.Optional[t.Any] = None, - file: t.Optional[t.IO[t.Any]] = None, - nl: bool = True, - err: bool = False, - color: t.Optional[bool] = None, -) -> None: - """Print a message and newline to stdout or a file. This should be - used instead of :func:`print` because it provides better support - for different data, files, and environments. - - Compared to :func:`print`, this does the following: - - - Ensures that the output encoding is not misconfigured on Linux. - - Supports Unicode in the Windows console. - - Supports writing to binary outputs, and supports writing bytes - to text outputs. - - Supports colors and styles on Windows. - - Removes ANSI color and style codes if the output does not look - like an interactive terminal. - - Always flushes the output. - - :param message: The string or bytes to output. Other objects are - converted to strings. 
- :param file: The file to write to. Defaults to ``stdout``. - :param err: Write to ``stderr`` instead of ``stdout``. - :param nl: Print a newline after the message. Enabled by default. - :param color: Force showing or hiding colors and other styles. By - default Click will remove color if the output does not look like - an interactive terminal. - - .. versionchanged:: 6.0 - Support Unicode output on the Windows console. Click does not - modify ``sys.stdout``, so ``sys.stdout.write()`` and ``print()`` - will still not support Unicode. - - .. versionchanged:: 4.0 - Added the ``color`` parameter. - - .. versionadded:: 3.0 - Added the ``err`` parameter. - - .. versionchanged:: 2.0 - Support colors on Windows if colorama is installed. - """ - if file is None: - if err: - file = _default_text_stderr() - else: - file = _default_text_stdout() - - # There are no standard streams attached to write to. For example, - # pythonw on Windows. - if file is None: - return - - # Convert non bytes/text into the native string type. - if message is not None and not isinstance(message, (str, bytes, bytearray)): - out: t.Optional[t.Union[str, bytes]] = str(message) - else: - out = message - - if nl: - out = out or "" - if isinstance(out, str): - out += "\n" - else: - out += b"\n" - - if not out: - file.flush() - return - - # If there is a message and the value looks like bytes, we manually - # need to find the binary stream and write the message in there. - # This is done separately so that most stream types will work as you - # would expect. Eg: you can write to StringIO for other cases. - if isinstance(out, (bytes, bytearray)): - binary_file = _find_binary_writer(file) - - if binary_file is not None: - file.flush() - binary_file.write(out) - binary_file.flush() - return - - # ANSI style code support. For no message or bytes, nothing happens. - # When outputting to a file instead of a terminal, strip codes. - else: - color = resolve_color_default(color) - - if should_strip_ansi(file, color): - out = strip_ansi(out) - elif WIN: - if auto_wrap_for_ansi is not None: - file = auto_wrap_for_ansi(file) # type: ignore - elif not color: - out = strip_ansi(out) - - file.write(out) # type: ignore - file.flush() - - -def get_binary_stream(name: "te.Literal['stdin', 'stdout', 'stderr']") -> t.BinaryIO: - """Returns a system stream for byte processing. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - """ - opener = binary_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener() - - -def get_text_stream( - name: "te.Literal['stdin', 'stdout', 'stderr']", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", -) -> t.TextIO: - """Returns a system stream for text processing. This usually returns - a wrapped stream around a binary stream returned from - :func:`get_binary_stream` but it also can take shortcuts for already - correctly configured streams. - - :param name: the name of the stream to open. Valid names are ``'stdin'``, - ``'stdout'`` and ``'stderr'`` - :param encoding: overrides the detected default encoding. - :param errors: overrides the default error mode. 
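A short sketch of the ``echo`` behaviours listed above; the byte string and styled text are illustrative:

.. code-block:: python

    import click

    click.echo("hello")                    # newline appended, output flushed
    click.echo(b"\xde\xad\xbe\xef")        # bytes are routed to a binary stream
    click.echo(click.style("oops", fg="red"), err=True)  # stderr; color stripped when piped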
- """ - opener = text_streams.get(name) - if opener is None: - raise TypeError(f"Unknown standard stream '{name}'") - return opener(encoding, errors) - - -def open_file( - filename: str, - mode: str = "r", - encoding: t.Optional[str] = None, - errors: t.Optional[str] = "strict", - lazy: bool = False, - atomic: bool = False, -) -> t.IO[t.Any]: - """Open a file, with extra behavior to handle ``'-'`` to indicate - a standard stream, lazy open on write, and atomic write. Similar to - the behavior of the :class:`~click.File` param type. - - If ``'-'`` is given to open ``stdout`` or ``stdin``, the stream is - wrapped so that using it in a context manager will not close it. - This makes it possible to use the function without accidentally - closing a standard stream: - - .. code-block:: python - - with open_file(filename) as f: - ... - - :param filename: The name of the file to open, or ``'-'`` for - ``stdin``/``stdout``. - :param mode: The mode in which to open the file. - :param encoding: The encoding to decode or encode a file opened in - text mode. - :param errors: The error handling mode. - :param lazy: Wait to open the file until it is accessed. For read - mode, the file is temporarily opened to raise access errors - early, then closed until it is read again. - :param atomic: Write to a temporary file and replace the given file - on close. - - .. versionadded:: 3.0 - """ - if lazy: - return t.cast( - t.IO[t.Any], LazyFile(filename, mode, encoding, errors, atomic=atomic) - ) - - f, should_close = open_stream(filename, mode, encoding, errors, atomic=atomic) - - if not should_close: - f = t.cast(t.IO[t.Any], KeepOpenFile(f)) - - return f - - -def format_filename( - filename: "t.Union[str, bytes, os.PathLike[str], os.PathLike[bytes]]", - shorten: bool = False, -) -> str: - """Format a filename as a string for display. Ensures the filename can be - displayed by replacing any invalid bytes or surrogate escapes in the name - with the replacement character ``�``. - - Invalid bytes or surrogate escapes will raise an error when written to a - stream with ``errors="strict". This will typically happen with ``stdout`` - when the locale is something like ``en_GB.UTF-8``. - - Many scenarios *are* safe to write surrogates though, due to PEP 538 and - PEP 540, including: - - - Writing to ``stderr``, which uses ``errors="backslashreplace"``. - - The system has ``LANG=C.UTF-8``, ``C``, or ``POSIX``. Python opens - stdout and stderr with ``errors="surrogateescape"``. - - None of ``LANG/LC_*`` are set. Python assumes ``LANG=C.UTF-8``. - - Python is started in UTF-8 mode with ``PYTHONUTF8=1`` or ``-X utf8``. - Python opens stdout and stderr with ``errors="surrogateescape"``. - - :param filename: formats a filename for UI display. This will also convert - the filename into unicode without failing. - :param shorten: this optionally shortens the filename to strip of the - path that leads up to it. - """ - if shorten: - filename = os.path.basename(filename) - else: - filename = os.fspath(filename) - - if isinstance(filename, bytes): - filename = filename.decode(sys.getfilesystemencoding(), "replace") - else: - filename = filename.encode("utf-8", "surrogateescape").decode( - "utf-8", "replace" - ) - - return filename - - -def get_app_dir(app_name: str, roaming: bool = True, force_posix: bool = False) -> str: - r"""Returns the config folder for the application. The default behavior - is to return whatever is most appropriate for the operating system. 
-
-    To give you an idea, for an app called ``"Foo Bar"``, something like
-    the following folders could be returned:
-
-    Mac OS X:
-      ``~/Library/Application Support/Foo Bar``
-    Mac OS X (POSIX):
-      ``~/.foo-bar``
-    Unix:
-      ``~/.config/foo-bar``
-    Unix (POSIX):
-      ``~/.foo-bar``
-    Windows (roaming):
-      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
-    Windows (not roaming):
-      ``C:\Users\<user>\AppData\Local\Foo Bar``
-
-    .. versionadded:: 2.0
-
-    :param app_name: the application name. This should be properly capitalized
-        and can contain whitespace.
-    :param roaming: controls if the folder should be roaming or not on Windows.
-        Has no effect otherwise.
-    :param force_posix: if this is set to `True` then on any POSIX system the
-        folder will be stored in the home folder with a leading
-        dot instead of the XDG config home or darwin's
-        application support folder.
-    """
-    if WIN:
-        key = "APPDATA" if roaming else "LOCALAPPDATA"
-        folder = os.environ.get(key)
-        if folder is None:
-            folder = os.path.expanduser("~")
-        return os.path.join(folder, app_name)
-    if force_posix:
-        return os.path.join(os.path.expanduser(f"~/.{_posixify(app_name)}"))
-    if sys.platform == "darwin":
-        return os.path.join(
-            os.path.expanduser("~/Library/Application Support"), app_name
-        )
-    return os.path.join(
-        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
-        _posixify(app_name),
-    )
-
-
-class PacifyFlushWrapper:
-    """This wrapper is used to catch and suppress BrokenPipeErrors resulting
-    from ``.flush()`` being called on broken pipe during the shutdown/final-GC
-    of the Python interpreter. Notably ``.flush()`` is always called on
-    ``sys.stdout`` and ``sys.stderr``. So as to have minimal impact on any
-    other cleanup code, and the case where the underlying file is not a broken
-    pipe, all calls and attributes are proxied.
-    """
-
-    def __init__(self, wrapped: t.IO[t.Any]) -> None:
-        self.wrapped = wrapped
-
-    def flush(self) -> None:
-        try:
-            self.wrapped.flush()
-        except OSError as e:
-            import errno
-
-            if e.errno != errno.EPIPE:
-                raise
-
-    def __getattr__(self, attr: str) -> t.Any:
-        return getattr(self.wrapped, attr)
-
-
-def _detect_program_name(
-    path: t.Optional[str] = None, _main: t.Optional[ModuleType] = None
-) -> str:
-    """Determine the command used to run the program, for use in help
-    text. If a file or entry point was executed, the file name is
-    returned. If ``python -m`` was used to execute a module or package,
-    ``python -m name`` is returned.
-
-    This doesn't try to be too precise, the goal is to give a concise
-    name for help text. Files are only shown as their name without the
-    path. ``python`` is only shown for modules, and the full path to
-    ``sys.executable`` is not shown.
-
-    :param path: The Python file being executed. Python puts this in
-        ``sys.argv[0]``, which is used by default.
-    :param _main: The ``__main__`` module. This should only be passed
-        during internal testing.
-
-    .. versionadded:: 8.0
-        Based on command args detection in the Werkzeug reloader.
-
-    :meta private:
-    """
-    if _main is None:
-        _main = sys.modules["__main__"]
-
-    if not path:
-        path = sys.argv[0]
-
-    # The value of __package__ indicates how Python was called. It may
-    # not exist if a setuptools script is installed as an egg. It may be
-    # set incorrectly for entry points created with pip on Windows.
-    # It is set to "" inside a Shiv or PEX zipapp.
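A sketch of ``get_app_dir`` from above; the returned folder is platform-dependent and ``settings.ini`` is illustrative:

.. code-block:: python

    import os
    import click

    config_dir = click.get_app_dir("Foo Bar")   # e.g. ~/.config/foo-bar on Linux
    config_file = os.path.join(config_dir, "settings.ini")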
- if getattr(_main, "__package__", None) in {None, ""} or ( - os.name == "nt" - and _main.__package__ == "" - and not os.path.exists(path) - and os.path.exists(f"{path}.exe") - ): - # Executed a file, like "python app.py". - return os.path.basename(path) - - # Executed a module, like "python -m example". - # Rewritten by Python from "-m script" to "/path/to/script.py". - # Need to look at main module to determine how it was executed. - py_module = t.cast(str, _main.__package__) - name = os.path.splitext(os.path.basename(path))[0] - - # A submodule like "example.cli". - if name != "__main__": - py_module = f"{py_module}.{name}" - - return f"python -m {py_module.lstrip('.')}" - - -def _expand_args( - args: t.Iterable[str], - *, - user: bool = True, - env: bool = True, - glob_recursive: bool = True, -) -> t.List[str]: - """Simulate Unix shell expansion with Python functions. - - See :func:`glob.glob`, :func:`os.path.expanduser`, and - :func:`os.path.expandvars`. - - This is intended for use on Windows, where the shell does not do any - expansion. It may not exactly match what a Unix shell would do. - - :param args: List of command line arguments to expand. - :param user: Expand user home directory. - :param env: Expand environment variables. - :param glob_recursive: ``**`` matches directories recursively. - - .. versionchanged:: 8.1 - Invalid glob patterns are treated as empty expansions rather - than raising an error. - - .. versionadded:: 8.0 - - :meta private: - """ - from glob import glob - - out = [] - - for arg in args: - if user: - arg = os.path.expanduser(arg) - - if env: - arg = os.path.expandvars(arg) - - try: - matches = glob(arg, recursive=glob_recursive) - except re.error: - matches = [] - - if not matches: - out.append(arg) - else: - out.extend(matches) - - return out diff --git a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER b/myenv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA b/myenv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA deleted file mode 100644 index a1b5c57..0000000 --- a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/METADATA +++ /dev/null @@ -1,441 +0,0 @@ -Metadata-Version: 2.1 -Name: colorama -Version: 0.4.6 -Summary: Cross-platform colored terminal text. 
-Project-URL: Homepage, https://github.com/tartley/colorama
-Author-email: Jonathan Hartley <tartley@tartley.com>
-License-File: LICENSE.txt
-Keywords: ansi,color,colour,crossplatform,terminal,text,windows,xplatform
-Classifier: Development Status :: 5 - Production/Stable
-Classifier: Environment :: Console
-Classifier: Intended Audience :: Developers
-Classifier: License :: OSI Approved :: BSD License
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2
-Classifier: Programming Language :: Python :: 2.7
-Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.7
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: Implementation :: CPython
-Classifier: Programming Language :: Python :: Implementation :: PyPy
-Classifier: Topic :: Terminals
-Requires-Python: !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7
-Description-Content-Type: text/x-rst
-
-.. image:: https://img.shields.io/pypi/v/colorama.svg
-    :target: https://pypi.org/project/colorama/
-    :alt: Latest Version
-
-.. image:: https://img.shields.io/pypi/pyversions/colorama.svg
-    :target: https://pypi.org/project/colorama/
-    :alt: Supported Python versions
-
-.. image:: https://github.com/tartley/colorama/actions/workflows/test.yml/badge.svg
-    :target: https://github.com/tartley/colorama/actions/workflows/test.yml
-    :alt: Build Status
-
-Colorama
-========
-
-Makes ANSI escape character sequences (for producing colored terminal text and
-cursor positioning) work under MS Windows.
-
-.. |donate| image:: https://www.paypalobjects.com/en_US/i/btn/btn_donate_SM.gif
-    :target: https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=2MZ9D2GMLYCUJ&item_name=Colorama&currency_code=USD
-    :alt: Donate with Paypal
-
-`PyPI for releases <https://pypi.org/project/colorama/>`_ |
-`Github for source <https://github.com/tartley/colorama>`_ |
-`Colorama for enterprise on Tidelift <https://github.com/tartley/colorama/blob/master/ENTERPRISE.md>`_
-
-If you find Colorama useful, please |donate| to the authors. Thank you!
-
-Installation
-------------
-
-Tested on CPython 2.7, 3.7, 3.8, 3.9 and 3.10 and Pypy 2.7 and 3.8.
-
-No requirements other than the standard library.
-
-.. code-block:: bash
-
-    pip install colorama
-    # or
-    conda install -c anaconda colorama
-
-Description
------------
-
-ANSI escape character sequences have long been used to produce colored terminal
-text and cursor positioning on Unix and Macs. Colorama makes this work on
-Windows, too, by wrapping ``stdout``, stripping ANSI sequences it finds (which
-would appear as gobbledygook in the output), and converting them into the
-appropriate win32 calls to modify the state of the terminal. On other platforms,
-Colorama does nothing.
-
-This has the upshot of providing a simple cross-platform API for printing
-colored terminal text from Python, and has the happy side-effect that existing
-applications or libraries which use ANSI sequences to produce colored output on
-Linux or Macs can now also work on Windows, simply by calling
-``colorama.just_fix_windows_console()`` (since v0.4.6) or ``colorama.init()``
-(all versions, but may have other side-effects – see below).
-
-An alternative approach is to install ``ansi.sys`` on Windows machines, which
-provides the same behaviour for all applications running in terminals. Colorama
-is intended for situations where that isn't easy (e.g., maybe your app doesn't
-have an installer.)
- -Demo scripts in the source code repository print some colored text using -ANSI sequences. Compare their output under Gnome-terminal's built in ANSI -handling, versus on Windows Command-Prompt using Colorama: - -.. image:: https://github.com/tartley/colorama/raw/master/screenshots/ubuntu-demo.png - :width: 661 - :height: 357 - :alt: ANSI sequences on Ubuntu under gnome-terminal. - -.. image:: https://github.com/tartley/colorama/raw/master/screenshots/windows-demo.png - :width: 668 - :height: 325 - :alt: Same ANSI sequences on Windows, using Colorama. - -These screenshots show that, on Windows, Colorama does not support ANSI 'dim -text'; it looks the same as 'normal text'. - -Usage ------ - -Initialisation -.............. - -If the only thing you want from Colorama is to get ANSI escapes to work on -Windows, then run: - -.. code-block:: python - - from colorama import just_fix_windows_console - just_fix_windows_console() - -If you're on a recent version of Windows 10 or better, and your stdout/stderr -are pointing to a Windows console, then this will flip the magic configuration -switch to enable Windows' built-in ANSI support. - -If you're on an older version of Windows, and your stdout/stderr are pointing to -a Windows console, then this will wrap ``sys.stdout`` and/or ``sys.stderr`` in a -magic file object that intercepts ANSI escape sequences and issues the -appropriate Win32 calls to emulate them. - -In all other circumstances, it does nothing whatsoever. Basically the idea is -that this makes Windows act like Unix with respect to ANSI escape handling. - -It's safe to call this function multiple times. It's safe to call this function -on non-Windows platforms, but it won't do anything. It's safe to call this -function when one or both of your stdout/stderr are redirected to a file – it -won't do anything to those streams. - -Alternatively, you can use the older interface with more features (but also more -potential footguns): - -.. code-block:: python - - from colorama import init - init() - -This does the same thing as ``just_fix_windows_console``, except for the -following differences: - -- It's not safe to call ``init`` multiple times; you can end up with multiple - layers of wrapping and broken ANSI support. - -- Colorama will apply a heuristic to guess whether stdout/stderr support ANSI, - and if it thinks they don't, then it will wrap ``sys.stdout`` and - ``sys.stderr`` in a magic file object that strips out ANSI escape sequences - before printing them. This happens on all platforms, and can be convenient if - you want to write your code to emit ANSI escape sequences unconditionally, and - let Colorama decide whether they should actually be output. But note that - Colorama's heuristic is not particularly clever. - -- ``init`` also accepts explicit keyword args to enable/disable various - functionality – see below. - -To stop using Colorama before your program exits, simply call ``deinit()``. -This will restore ``stdout`` and ``stderr`` to their original values, so that -Colorama is disabled. To resume using Colorama again, call ``reinit()``; it is -cheaper than calling ``init()`` again (but does the same thing). - -Most users should depend on ``colorama >= 0.4.6``, and use -``just_fix_windows_console``. The old ``init`` interface will be supported -indefinitely for backwards compatibility, but we don't plan to fix any issues -with it, also for backwards compatibility. - -Colored Output -.............. 
-
-Cross-platform printing of colored text can then be done using Colorama's
-constant shorthand for ANSI escape sequences. These are deliberately
-rudimentary; see below.
-
-.. code-block:: python
-
-    from colorama import Fore, Back, Style
-    print(Fore.RED + 'some red text')
-    print(Back.GREEN + 'and with a green background')
-    print(Style.DIM + 'and in dim text')
-    print(Style.RESET_ALL)
-    print('back to normal now')
-
-...or simply by manually printing ANSI sequences from your own code:
-
-.. code-block:: python
-
-    print('\033[31m' + 'some red text')
-    print('\033[39m') # and reset to default color
-
-...or, Colorama can be used in conjunction with existing ANSI libraries
-such as the venerable `Termcolor <https://pypi.org/project/termcolor/>`_,
-the fabulous `Blessings <https://pypi.org/project/blessings/>`_,
-or the incredible `Rich <https://pypi.org/project/rich/>`_.
-
-If you wish Colorama's Fore, Back and Style constants were more capable,
-then consider using one of the above highly capable libraries to generate
-colors, etc, and use Colorama just for its primary purpose: to convert
-those ANSI sequences to also work on Windows:
-
-SIMILARLY, do not send PRs adding the generation of new ANSI types to Colorama.
-We are only interested in converting ANSI codes to win32 API calls, not
-shortcuts like the above to generate ANSI characters.
-
-.. code-block:: python
-
-    from colorama import just_fix_windows_console
-    from termcolor import colored
-
-    # use Colorama to make Termcolor work on Windows too
-    just_fix_windows_console()
-
-    # then use Termcolor for all colored text output
-    print(colored('Hello, World!', 'green', 'on_red'))
-
-Available formatting constants are::
-
-    Fore: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
-    Back: BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, RESET.
-    Style: DIM, NORMAL, BRIGHT, RESET_ALL
-
-``Style.RESET_ALL`` resets foreground, background, and brightness. Colorama will
-perform this reset automatically on program exit.
-
-These are fairly well supported, but not part of the standard::
-
-    Fore: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
-    Back: LIGHTBLACK_EX, LIGHTRED_EX, LIGHTGREEN_EX, LIGHTYELLOW_EX, LIGHTBLUE_EX, LIGHTMAGENTA_EX, LIGHTCYAN_EX, LIGHTWHITE_EX
-
-Cursor Positioning
-..................
-
-ANSI codes to reposition the cursor are supported. See ``demos/demo06.py`` for
-an example of how to generate them.
-
-Init Keyword Args
-.................
-
-``init()`` accepts some ``**kwargs`` to override default behaviour.
-
-init(autoreset=False):
-    If you find yourself repeatedly sending reset sequences to turn off color
-    changes at the end of every print, then ``init(autoreset=True)`` will
-    automate that:
-
-    .. code-block:: python
-
-        from colorama import init
-        init(autoreset=True)
-        print(Fore.RED + 'some red text')
-        print('automatically back to default color again')
-
-init(strip=None):
-    Pass ``True`` or ``False`` to override whether ANSI codes should be
-    stripped from the output. The default behaviour is to strip if on Windows
-    or if output is redirected (not a tty).
-
-init(convert=None):
-    Pass ``True`` or ``False`` to override whether to convert ANSI codes in the
-    output into win32 calls. The default behaviour is to convert if on Windows
-    and output is to a tty (terminal).
-
-init(wrap=True):
-    On Windows, Colorama works by replacing ``sys.stdout`` and ``sys.stderr``
-    with proxy objects, which override the ``.write()`` method to do their work.
-    If this wrapping causes you problems, then this can be disabled by passing
-    ``init(wrap=False)``. The default behaviour is to wrap if ``autoreset`` or
-    ``strip`` or ``convert`` are True.
-
-    When wrapping is disabled, colored printing on non-Windows platforms will
-    continue to work as normal. To do cross-platform colored output, you can
-    use Colorama's ``AnsiToWin32`` proxy directly:
-
-    .. code-block:: python
-
-        import sys
-        from colorama import init, AnsiToWin32
-        init(wrap=False)
-        stream = AnsiToWin32(sys.stderr).stream
-
-        # Python 2
-        print >>stream, Fore.BLUE + 'blue text on stderr'
-
-        # Python 3
-        print(Fore.BLUE + 'blue text on stderr', file=stream)
-
-Recognised ANSI Sequences
-.........................
-
-ANSI sequences generally take the form::
-
-    ESC [ <param> ; <param> ... <command>
-
-Where ``<param>`` is an integer, and ``<command>`` is a single letter. Zero or
-more params are passed to a ``<command>``. If no params are passed, it is
-generally synonymous with passing a single zero. No spaces exist in the
-sequence; they have been inserted here simply to read more easily.
-
-The only ANSI sequences that Colorama converts into win32 calls are::
-
-    ESC [ 0 m       # reset all (colors and brightness)
-    ESC [ 1 m       # bright
-    ESC [ 2 m       # dim (looks same as normal brightness)
-    ESC [ 22 m      # normal brightness
-
-    # FOREGROUND:
-    ESC [ 30 m      # black
-    ESC [ 31 m      # red
-    ESC [ 32 m      # green
-    ESC [ 33 m      # yellow
-    ESC [ 34 m      # blue
-    ESC [ 35 m      # magenta
-    ESC [ 36 m      # cyan
-    ESC [ 37 m      # white
-    ESC [ 39 m      # reset
-
-    # BACKGROUND
-    ESC [ 40 m      # black
-    ESC [ 41 m      # red
-    ESC [ 42 m      # green
-    ESC [ 43 m      # yellow
-    ESC [ 44 m      # blue
-    ESC [ 45 m      # magenta
-    ESC [ 46 m      # cyan
-    ESC [ 47 m      # white
-    ESC [ 49 m      # reset
-
-    # cursor positioning
-    ESC [ y;x H     # position cursor at x across, y down
-    ESC [ y;x f     # position cursor at x across, y down
-    ESC [ n A       # move cursor n lines up
-    ESC [ n B       # move cursor n lines down
-    ESC [ n C       # move cursor n characters forward
-    ESC [ n D       # move cursor n characters backward
-
-    # clear the screen
-    ESC [ mode J    # clear the screen
-
-    # clear the line
-    ESC [ mode K    # clear the line
-
-Multiple numeric params to the ``'m'`` command can be combined into a single
-sequence::
-
-    ESC [ 36 ; 45 ; 1 m     # bright cyan text on magenta background
-
-All other ANSI sequences of the form ``ESC [ <param> ; <param> ... <command>``
-are silently stripped from the output on Windows.
-
-Any other form of ANSI sequence, such as single-character codes or alternative
-initial characters, is not recognised or stripped. It would be cool to add
-them though. Let me know if it would be useful for you, via the Issues on
-GitHub.
-
-Status & Known Problems
------------------------
-
-I've personally only tested it on Windows XP (CMD, Console2), Ubuntu
-(gnome-terminal, xterm), and OS X.
-
-Some valid ANSI sequences aren't recognised.
-
-If you're hacking on the code, see `README-hacking.md`_. ESPECIALLY, see the
-explanation there of why we do not want PRs that allow Colorama to generate new
-types of ANSI codes.
-
-See outstanding issues and wish-list:
-https://github.com/tartley/colorama/issues
-
-If anything doesn't work for you, or doesn't do what you expected or hoped for,
-I'd love to hear about it on that issues list, would be delighted by patches,
-and would be happy to grant commit access to anyone who submits a working patch
-or two.
-
-.. _README-hacking.md: README-hacking.md
-
-License
--------
-
-Copyright Jonathan Hartley & Arnon Yaari, 2013-2020. BSD 3-Clause license; see
-LICENSE file.
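As a worked example of the cursor-positioning sequences tabulated under
"Recognised ANSI Sequences" above, the following sketch (an editorial
illustration, not part of the upstream README) emits them via the ``Cursor``
helpers defined in ``ansi.py``, which appears later in this diff:

.. code-block:: python

    from colorama import Cursor, init

    init()
    # Cursor.POS(x, y) emits 'ESC [ y;x H': position the cursor at
    # column x, row y (1-based, origin at the top-left).
    print(Cursor.POS(10, 5) + 'written at column 10, row 5')
    # Cursor.UP(n) emits 'ESC [ n A': move the cursor n lines up.
    print(Cursor.UP(2) + 'written two lines further up')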
- -Professional support --------------------- - -.. |tideliftlogo| image:: https://cdn2.hubspot.net/hubfs/4008838/website/logos/logos_for_download/Tidelift_primary-shorthand-logo.png - :alt: Tidelift - :target: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme - -.. list-table:: - :widths: 10 100 - - * - |tideliftlogo| - - Professional support for colorama is available as part of the - `Tidelift Subscription`_. - Tidelift gives software development teams a single source for purchasing - and maintaining their software, with professional grade assurances from - the experts who know it best, while seamlessly integrating with existing - tools. - -.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-colorama?utm_source=pypi-colorama&utm_medium=referral&utm_campaign=readme - -Thanks ------- - -See the CHANGELOG for more thanks! - -* Marc Schlaich (schlamar) for a ``setup.py`` fix for Python2.5. -* Marc Abramowitz, reported & fixed a crash on exit with closed ``stdout``, - providing a solution to issue #7's setuptools/distutils debate, - and other fixes. -* User 'eryksun', for guidance on correctly instantiating ``ctypes.windll``. -* Matthew McCormick for politely pointing out a longstanding crash on non-Win. -* Ben Hoyt, for a magnificent fix under 64-bit Windows. -* Jesse at Empty Square for submitting a fix for examples in the README. -* User 'jamessp', an observant documentation fix for cursor positioning. -* User 'vaal1239', Dave Mckee & Lackner Kristof for a tiny but much-needed Win7 - fix. -* Julien Stuyck, for wisely suggesting Python3 compatible updates to README. -* Daniel Griffith for multiple fabulous patches. -* Oscar Lesta for a valuable fix to stop ANSI chars being sent to non-tty - output. -* Roger Binns, for many suggestions, valuable feedback, & bug reports. -* Tim Golden for thought and much appreciated feedback on the initial idea. -* User 'Zearin' for updates to the README file. -* John Szakmeister for adding support for light colors -* Charles Merriam for adding documentation to demos -* Jurko for a fix on 64-bit Windows CPython2.5 w/o ctypes -* Florian Bruhin for a fix when stdout or stderr are None -* Thomas Weininger for fixing ValueError on Windows -* Remi Rampin for better Github integration and fixes to the README file -* Simeon Visser for closing a file handle using 'with' and updating classifiers - to include Python 3.3 and 3.4 -* Andy Neff for fixing RESET of LIGHT_EX colors. -* Jonathan Hartley for the initial idea and implementation. 
diff --git a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD b/myenv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD deleted file mode 100644 index cd6b130..0000000 --- a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/RECORD +++ /dev/null @@ -1,31 +0,0 @@ -colorama-0.4.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -colorama-0.4.6.dist-info/METADATA,sha256=e67SnrUMOym9sz_4TjF3vxvAV4T3aF7NyqRHHH3YEMw,17158 -colorama-0.4.6.dist-info/RECORD,, -colorama-0.4.6.dist-info/WHEEL,sha256=cdcF4Fbd0FPtw2EMIOwH-3rSOTUdTCeOSXRMD1iLUb8,105 -colorama-0.4.6.dist-info/licenses/LICENSE.txt,sha256=ysNcAmhuXQSlpxQL-zs25zrtSWZW6JEQLkKIhteTAxg,1491 -colorama/__init__.py,sha256=wePQA4U20tKgYARySLEC047ucNX-g8pRLpYBuiHlLb8,266 -colorama/__pycache__/__init__.cpython-312.pyc,, -colorama/__pycache__/ansi.cpython-312.pyc,, -colorama/__pycache__/ansitowin32.cpython-312.pyc,, -colorama/__pycache__/initialise.cpython-312.pyc,, -colorama/__pycache__/win32.cpython-312.pyc,, -colorama/__pycache__/winterm.cpython-312.pyc,, -colorama/ansi.py,sha256=Top4EeEuaQdBWdteKMEcGOTeKeF19Q-Wo_6_Cj5kOzQ,2522 -colorama/ansitowin32.py,sha256=vPNYa3OZbxjbuFyaVo0Tmhmy1FZ1lKMWCnT7odXpItk,11128 -colorama/initialise.py,sha256=-hIny86ClXo39ixh5iSCfUIa2f_h_bgKRDW7gqs-KLU,3325 -colorama/tests/__init__.py,sha256=MkgPAEzGQd-Rq0w0PZXSX2LadRWhUECcisJY8lSrm4Q,75 -colorama/tests/__pycache__/__init__.cpython-312.pyc,, -colorama/tests/__pycache__/ansi_test.cpython-312.pyc,, -colorama/tests/__pycache__/ansitowin32_test.cpython-312.pyc,, -colorama/tests/__pycache__/initialise_test.cpython-312.pyc,, -colorama/tests/__pycache__/isatty_test.cpython-312.pyc,, -colorama/tests/__pycache__/utils.cpython-312.pyc,, -colorama/tests/__pycache__/winterm_test.cpython-312.pyc,, -colorama/tests/ansi_test.py,sha256=FeViDrUINIZcr505PAxvU4AjXz1asEiALs9GXMhwRaE,2839 -colorama/tests/ansitowin32_test.py,sha256=RN7AIhMJ5EqDsYaCjVo-o4u8JzDD4ukJbmevWKS70rY,10678 -colorama/tests/initialise_test.py,sha256=BbPy-XfyHwJ6zKozuQOvNvQZzsx9vdb_0bYXn7hsBTc,6741 -colorama/tests/isatty_test.py,sha256=Pg26LRpv0yQDB5Ac-sxgVXG7hsA1NYvapFgApZfYzZg,1866 -colorama/tests/utils.py,sha256=1IIRylG39z5-dzq09R_ngufxyPZxgldNbrxKxUGwGKE,1079 -colorama/tests/winterm_test.py,sha256=qoWFPEjym5gm2RuMwpf3pOis3a5r_PJZFCzK254JL8A,3709 -colorama/win32.py,sha256=YQOKwMTwtGBbsY4dL5HYTvwTeP9wIQra5MvPNddpxZs,6181 -colorama/winterm.py,sha256=XCQFDHjPi6AHYNdZwy0tA02H-Jh48Jp-HvCjeLeLp3U,7134 diff --git a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL b/myenv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL deleted file mode 100644 index d79189f..0000000 --- a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.11.1 -Root-Is-Purelib: true -Tag: py2-none-any -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt b/myenv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt deleted file mode 100644 index 3105888..0000000 --- a/myenv/Lib/site-packages/colorama-0.4.6.dist-info/licenses/LICENSE.txt +++ /dev/null @@ -1,27 +0,0 @@ -Copyright (c) 2010 Jonathan Hartley -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. 
- -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holders, nor those of its contributors - may be used to endorse or promote products derived from this software without - specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/Lib/site-packages/colorama/__init__.py b/myenv/Lib/site-packages/colorama/__init__.py deleted file mode 100644 index 383101c..0000000 --- a/myenv/Lib/site-packages/colorama/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from .initialise import init, deinit, reinit, colorama_text, just_fix_windows_console -from .ansi import Fore, Back, Style, Cursor -from .ansitowin32 import AnsiToWin32 - -__version__ = '0.4.6' - diff --git a/myenv/Lib/site-packages/colorama/ansi.py b/myenv/Lib/site-packages/colorama/ansi.py deleted file mode 100644 index 11ec695..0000000 --- a/myenv/Lib/site-packages/colorama/ansi.py +++ /dev/null @@ -1,102 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -''' -This module generates ANSI character codes to printing colors to terminals. -See: http://en.wikipedia.org/wiki/ANSI_escape_code -''' - -CSI = '\033[' -OSC = '\033]' -BEL = '\a' - - -def code_to_chars(code): - return CSI + str(code) + 'm' - -def set_title(title): - return OSC + '2;' + title + BEL - -def clear_screen(mode=2): - return CSI + str(mode) + 'J' - -def clear_line(mode=2): - return CSI + str(mode) + 'K' - - -class AnsiCodes(object): - def __init__(self): - # the subclasses declare class attributes which are numbers. - # Upon instantiation we define instance attributes, which are the same - # as the class attributes but wrapped with the ANSI escape sequence - for name in dir(self): - if not name.startswith('_'): - value = getattr(self, name) - setattr(self, name, code_to_chars(value)) - - -class AnsiCursor(object): - def UP(self, n=1): - return CSI + str(n) + 'A' - def DOWN(self, n=1): - return CSI + str(n) + 'B' - def FORWARD(self, n=1): - return CSI + str(n) + 'C' - def BACK(self, n=1): - return CSI + str(n) + 'D' - def POS(self, x=1, y=1): - return CSI + str(y) + ';' + str(x) + 'H' - - -class AnsiFore(AnsiCodes): - BLACK = 30 - RED = 31 - GREEN = 32 - YELLOW = 33 - BLUE = 34 - MAGENTA = 35 - CYAN = 36 - WHITE = 37 - RESET = 39 - - # These are fairly well supported, but not part of the standard. 
- LIGHTBLACK_EX = 90 - LIGHTRED_EX = 91 - LIGHTGREEN_EX = 92 - LIGHTYELLOW_EX = 93 - LIGHTBLUE_EX = 94 - LIGHTMAGENTA_EX = 95 - LIGHTCYAN_EX = 96 - LIGHTWHITE_EX = 97 - - -class AnsiBack(AnsiCodes): - BLACK = 40 - RED = 41 - GREEN = 42 - YELLOW = 43 - BLUE = 44 - MAGENTA = 45 - CYAN = 46 - WHITE = 47 - RESET = 49 - - # These are fairly well supported, but not part of the standard. - LIGHTBLACK_EX = 100 - LIGHTRED_EX = 101 - LIGHTGREEN_EX = 102 - LIGHTYELLOW_EX = 103 - LIGHTBLUE_EX = 104 - LIGHTMAGENTA_EX = 105 - LIGHTCYAN_EX = 106 - LIGHTWHITE_EX = 107 - - -class AnsiStyle(AnsiCodes): - BRIGHT = 1 - DIM = 2 - NORMAL = 22 - RESET_ALL = 0 - -Fore = AnsiFore() -Back = AnsiBack() -Style = AnsiStyle() -Cursor = AnsiCursor() diff --git a/myenv/Lib/site-packages/colorama/ansitowin32.py b/myenv/Lib/site-packages/colorama/ansitowin32.py deleted file mode 100644 index abf209e..0000000 --- a/myenv/Lib/site-packages/colorama/ansitowin32.py +++ /dev/null @@ -1,277 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import re -import sys -import os - -from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL -from .winterm import enable_vt_processing, WinTerm, WinColor, WinStyle -from .win32 import windll, winapi_test - - -winterm = None -if windll is not None: - winterm = WinTerm() - - -class StreamWrapper(object): - ''' - Wraps a stream (such as stdout), acting as a transparent proxy for all - attribute access apart from method 'write()', which is delegated to our - Converter instance. - ''' - def __init__(self, wrapped, converter): - # double-underscore everything to prevent clashes with names of - # attributes on the wrapped stream object. - self.__wrapped = wrapped - self.__convertor = converter - - def __getattr__(self, name): - return getattr(self.__wrapped, name) - - def __enter__(self, *args, **kwargs): - # special method lookup bypasses __getattr__/__getattribute__, see - # https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit - # thus, contextlib magic methods are not proxied via __getattr__ - return self.__wrapped.__enter__(*args, **kwargs) - - def __exit__(self, *args, **kwargs): - return self.__wrapped.__exit__(*args, **kwargs) - - def __setstate__(self, state): - self.__dict__ = state - - def __getstate__(self): - return self.__dict__ - - def write(self, text): - self.__convertor.write(text) - - def isatty(self): - stream = self.__wrapped - if 'PYCHARM_HOSTED' in os.environ: - if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__): - return True - try: - stream_isatty = stream.isatty - except AttributeError: - return False - else: - return stream_isatty() - - @property - def closed(self): - stream = self.__wrapped - try: - return stream.closed - # AttributeError in the case that the stream doesn't support being closed - # ValueError for the case that the stream has already been detached when atexit runs - except (AttributeError, ValueError): - return True - - -class AnsiToWin32(object): - ''' - Implements a 'write()' method which, on Windows, will strip ANSI character - sequences from the text, and if outputting to a tty, will convert them into - win32 function calls. 
- ''' - ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer - ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command - - def __init__(self, wrapped, convert=None, strip=None, autoreset=False): - # The wrapped stream (normally sys.stdout or sys.stderr) - self.wrapped = wrapped - - # should we reset colors to defaults after every .write() - self.autoreset = autoreset - - # create the proxy wrapping our output stream - self.stream = StreamWrapper(wrapped, self) - - on_windows = os.name == 'nt' - # We test if the WinAPI works, because even if we are on Windows - # we may be using a terminal that doesn't support the WinAPI - # (e.g. Cygwin Terminal). In this case it's up to the terminal - # to support the ANSI codes. - conversion_supported = on_windows and winapi_test() - try: - fd = wrapped.fileno() - except Exception: - fd = -1 - system_has_native_ansi = not on_windows or enable_vt_processing(fd) - have_tty = not self.stream.closed and self.stream.isatty() - need_conversion = conversion_supported and not system_has_native_ansi - - # should we strip ANSI sequences from our output? - if strip is None: - strip = need_conversion or not have_tty - self.strip = strip - - # should we should convert ANSI sequences into win32 calls? - if convert is None: - convert = need_conversion and have_tty - self.convert = convert - - # dict of ansi codes to win32 functions and parameters - self.win32_calls = self.get_win32_calls() - - # are we wrapping stderr? - self.on_stderr = self.wrapped is sys.stderr - - def should_wrap(self): - ''' - True if this class is actually needed. If false, then the output - stream will not be affected, nor will win32 calls be issued, so - wrapping stdout is not actually required. 
This will generally be - False on non-Windows platforms, unless optional functionality like - autoreset has been requested using kwargs to init() - ''' - return self.convert or self.strip or self.autoreset - - def get_win32_calls(self): - if self.convert and winterm: - return { - AnsiStyle.RESET_ALL: (winterm.reset_all, ), - AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT), - AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL), - AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL), - AnsiFore.BLACK: (winterm.fore, WinColor.BLACK), - AnsiFore.RED: (winterm.fore, WinColor.RED), - AnsiFore.GREEN: (winterm.fore, WinColor.GREEN), - AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW), - AnsiFore.BLUE: (winterm.fore, WinColor.BLUE), - AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA), - AnsiFore.CYAN: (winterm.fore, WinColor.CYAN), - AnsiFore.WHITE: (winterm.fore, WinColor.GREY), - AnsiFore.RESET: (winterm.fore, ), - AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True), - AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True), - AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True), - AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True), - AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True), - AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True), - AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True), - AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True), - AnsiBack.BLACK: (winterm.back, WinColor.BLACK), - AnsiBack.RED: (winterm.back, WinColor.RED), - AnsiBack.GREEN: (winterm.back, WinColor.GREEN), - AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW), - AnsiBack.BLUE: (winterm.back, WinColor.BLUE), - AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA), - AnsiBack.CYAN: (winterm.back, WinColor.CYAN), - AnsiBack.WHITE: (winterm.back, WinColor.GREY), - AnsiBack.RESET: (winterm.back, ), - AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True), - AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True), - AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True), - AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True), - AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True), - AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True), - AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True), - AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True), - } - return dict() - - def write(self, text): - if self.strip or self.convert: - self.write_and_convert(text) - else: - self.wrapped.write(text) - self.wrapped.flush() - if self.autoreset: - self.reset_all() - - - def reset_all(self): - if self.convert: - self.call_win32('m', (0,)) - elif not self.strip and not self.stream.closed: - self.wrapped.write(Style.RESET_ALL) - - - def write_and_convert(self, text): - ''' - Write the given text to our wrapped stream, stripping any ANSI - sequences from the text, and optionally converting them into win32 - calls. 
- ''' - cursor = 0 - text = self.convert_osc(text) - for match in self.ANSI_CSI_RE.finditer(text): - start, end = match.span() - self.write_plain_text(text, cursor, start) - self.convert_ansi(*match.groups()) - cursor = end - self.write_plain_text(text, cursor, len(text)) - - - def write_plain_text(self, text, start, end): - if start < end: - self.wrapped.write(text[start:end]) - self.wrapped.flush() - - - def convert_ansi(self, paramstring, command): - if self.convert: - params = self.extract_params(command, paramstring) - self.call_win32(command, params) - - - def extract_params(self, command, paramstring): - if command in 'Hf': - params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';')) - while len(params) < 2: - # defaults: - params = params + (1,) - else: - params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0) - if len(params) == 0: - # defaults: - if command in 'JKm': - params = (0,) - elif command in 'ABCD': - params = (1,) - - return params - - - def call_win32(self, command, params): - if command == 'm': - for param in params: - if param in self.win32_calls: - func_args = self.win32_calls[param] - func = func_args[0] - args = func_args[1:] - kwargs = dict(on_stderr=self.on_stderr) - func(*args, **kwargs) - elif command in 'J': - winterm.erase_screen(params[0], on_stderr=self.on_stderr) - elif command in 'K': - winterm.erase_line(params[0], on_stderr=self.on_stderr) - elif command in 'Hf': # cursor position - absolute - winterm.set_cursor_position(params, on_stderr=self.on_stderr) - elif command in 'ABCD': # cursor position - relative - n = params[0] - # A - up, B - down, C - forward, D - back - x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command] - winterm.cursor_adjust(x, y, on_stderr=self.on_stderr) - - - def convert_osc(self, text): - for match in self.ANSI_OSC_RE.finditer(text): - start, end = match.span() - text = text[:start] + text[end:] - paramstring, command = match.groups() - if command == BEL: - if paramstring.count(";") == 1: - params = paramstring.split(";") - # 0 - change title and icon (we will only change title) - # 1 - change icon (we don't support this) - # 2 - change title - if params[0] in '02': - winterm.set_title(params[1]) - return text - - - def flush(self): - self.wrapped.flush() diff --git a/myenv/Lib/site-packages/colorama/initialise.py b/myenv/Lib/site-packages/colorama/initialise.py deleted file mode 100644 index d5fd4b7..0000000 --- a/myenv/Lib/site-packages/colorama/initialise.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import atexit -import contextlib -import sys - -from .ansitowin32 import AnsiToWin32 - - -def _wipe_internal_state_for_tests(): - global orig_stdout, orig_stderr - orig_stdout = None - orig_stderr = None - - global wrapped_stdout, wrapped_stderr - wrapped_stdout = None - wrapped_stderr = None - - global atexit_done - atexit_done = False - - global fixed_windows_console - fixed_windows_console = False - - try: - # no-op if it wasn't registered - atexit.unregister(reset_all) - except AttributeError: - # python 2: no atexit.unregister. Oh well, we did our best. 
- pass - - -def reset_all(): - if AnsiToWin32 is not None: # Issue #74: objects might become None at exit - AnsiToWin32(orig_stdout).reset_all() - - -def init(autoreset=False, convert=None, strip=None, wrap=True): - - if not wrap and any([autoreset, convert, strip]): - raise ValueError('wrap=False conflicts with any other arg=True') - - global wrapped_stdout, wrapped_stderr - global orig_stdout, orig_stderr - - orig_stdout = sys.stdout - orig_stderr = sys.stderr - - if sys.stdout is None: - wrapped_stdout = None - else: - sys.stdout = wrapped_stdout = \ - wrap_stream(orig_stdout, convert, strip, autoreset, wrap) - if sys.stderr is None: - wrapped_stderr = None - else: - sys.stderr = wrapped_stderr = \ - wrap_stream(orig_stderr, convert, strip, autoreset, wrap) - - global atexit_done - if not atexit_done: - atexit.register(reset_all) - atexit_done = True - - -def deinit(): - if orig_stdout is not None: - sys.stdout = orig_stdout - if orig_stderr is not None: - sys.stderr = orig_stderr - - -def just_fix_windows_console(): - global fixed_windows_console - - if sys.platform != "win32": - return - if fixed_windows_console: - return - if wrapped_stdout is not None or wrapped_stderr is not None: - # Someone already ran init() and it did stuff, so we won't second-guess them - return - - # On newer versions of Windows, AnsiToWin32.__init__ will implicitly enable the - # native ANSI support in the console as a side-effect. We only need to actually - # replace sys.stdout/stderr if we're in the old-style conversion mode. - new_stdout = AnsiToWin32(sys.stdout, convert=None, strip=None, autoreset=False) - if new_stdout.convert: - sys.stdout = new_stdout - new_stderr = AnsiToWin32(sys.stderr, convert=None, strip=None, autoreset=False) - if new_stderr.convert: - sys.stderr = new_stderr - - fixed_windows_console = True - -@contextlib.contextmanager -def colorama_text(*args, **kwargs): - init(*args, **kwargs) - try: - yield - finally: - deinit() - - -def reinit(): - if wrapped_stdout is not None: - sys.stdout = wrapped_stdout - if wrapped_stderr is not None: - sys.stderr = wrapped_stderr - - -def wrap_stream(stream, convert, strip, autoreset, wrap): - if wrap: - wrapper = AnsiToWin32(stream, - convert=convert, strip=strip, autoreset=autoreset) - if wrapper.should_wrap(): - stream = wrapper.stream - return stream - - -# Use this for initial setup as well, to reduce code duplication -_wipe_internal_state_for_tests() diff --git a/myenv/Lib/site-packages/colorama/tests/__init__.py b/myenv/Lib/site-packages/colorama/tests/__init__.py deleted file mode 100644 index 8c5661e..0000000 --- a/myenv/Lib/site-packages/colorama/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. diff --git a/myenv/Lib/site-packages/colorama/tests/ansi_test.py b/myenv/Lib/site-packages/colorama/tests/ansi_test.py deleted file mode 100644 index 0a20c80..0000000 --- a/myenv/Lib/site-packages/colorama/tests/ansi_test.py +++ /dev/null @@ -1,76 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import sys -from unittest import TestCase, main - -from ..ansi import Back, Fore, Style -from ..ansitowin32 import AnsiToWin32 - -stdout_orig = sys.stdout -stderr_orig = sys.stderr - - -class AnsiTest(TestCase): - - def setUp(self): - # sanity check: stdout should be a file or StringIO object. 
- # It will only be AnsiToWin32 if init() has previously wrapped it - self.assertNotEqual(type(sys.stdout), AnsiToWin32) - self.assertNotEqual(type(sys.stderr), AnsiToWin32) - - def tearDown(self): - sys.stdout = stdout_orig - sys.stderr = stderr_orig - - - def testForeAttributes(self): - self.assertEqual(Fore.BLACK, '\033[30m') - self.assertEqual(Fore.RED, '\033[31m') - self.assertEqual(Fore.GREEN, '\033[32m') - self.assertEqual(Fore.YELLOW, '\033[33m') - self.assertEqual(Fore.BLUE, '\033[34m') - self.assertEqual(Fore.MAGENTA, '\033[35m') - self.assertEqual(Fore.CYAN, '\033[36m') - self.assertEqual(Fore.WHITE, '\033[37m') - self.assertEqual(Fore.RESET, '\033[39m') - - # Check the light, extended versions. - self.assertEqual(Fore.LIGHTBLACK_EX, '\033[90m') - self.assertEqual(Fore.LIGHTRED_EX, '\033[91m') - self.assertEqual(Fore.LIGHTGREEN_EX, '\033[92m') - self.assertEqual(Fore.LIGHTYELLOW_EX, '\033[93m') - self.assertEqual(Fore.LIGHTBLUE_EX, '\033[94m') - self.assertEqual(Fore.LIGHTMAGENTA_EX, '\033[95m') - self.assertEqual(Fore.LIGHTCYAN_EX, '\033[96m') - self.assertEqual(Fore.LIGHTWHITE_EX, '\033[97m') - - - def testBackAttributes(self): - self.assertEqual(Back.BLACK, '\033[40m') - self.assertEqual(Back.RED, '\033[41m') - self.assertEqual(Back.GREEN, '\033[42m') - self.assertEqual(Back.YELLOW, '\033[43m') - self.assertEqual(Back.BLUE, '\033[44m') - self.assertEqual(Back.MAGENTA, '\033[45m') - self.assertEqual(Back.CYAN, '\033[46m') - self.assertEqual(Back.WHITE, '\033[47m') - self.assertEqual(Back.RESET, '\033[49m') - - # Check the light, extended versions. - self.assertEqual(Back.LIGHTBLACK_EX, '\033[100m') - self.assertEqual(Back.LIGHTRED_EX, '\033[101m') - self.assertEqual(Back.LIGHTGREEN_EX, '\033[102m') - self.assertEqual(Back.LIGHTYELLOW_EX, '\033[103m') - self.assertEqual(Back.LIGHTBLUE_EX, '\033[104m') - self.assertEqual(Back.LIGHTMAGENTA_EX, '\033[105m') - self.assertEqual(Back.LIGHTCYAN_EX, '\033[106m') - self.assertEqual(Back.LIGHTWHITE_EX, '\033[107m') - - - def testStyleAttributes(self): - self.assertEqual(Style.DIM, '\033[2m') - self.assertEqual(Style.NORMAL, '\033[22m') - self.assertEqual(Style.BRIGHT, '\033[1m') - - -if __name__ == '__main__': - main() diff --git a/myenv/Lib/site-packages/colorama/tests/ansitowin32_test.py b/myenv/Lib/site-packages/colorama/tests/ansitowin32_test.py deleted file mode 100644 index 91ca551..0000000 --- a/myenv/Lib/site-packages/colorama/tests/ansitowin32_test.py +++ /dev/null @@ -1,294 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-from io import StringIO, TextIOWrapper -from unittest import TestCase, main -try: - from contextlib import ExitStack -except ImportError: - # python 2 - from contextlib2 import ExitStack - -try: - from unittest.mock import MagicMock, Mock, patch -except ImportError: - from mock import MagicMock, Mock, patch - -from ..ansitowin32 import AnsiToWin32, StreamWrapper -from ..win32 import ENABLE_VIRTUAL_TERMINAL_PROCESSING -from .utils import osname - - -class StreamWrapperTest(TestCase): - - def testIsAProxy(self): - mockStream = Mock() - wrapper = StreamWrapper(mockStream, None) - self.assertTrue( wrapper.random_attr is mockStream.random_attr ) - - def testDelegatesWrite(self): - mockStream = Mock() - mockConverter = Mock() - wrapper = StreamWrapper(mockStream, mockConverter) - wrapper.write('hello') - self.assertTrue(mockConverter.write.call_args, (('hello',), {})) - - def testDelegatesContext(self): - mockConverter = Mock() - s = StringIO() - with StreamWrapper(s, mockConverter) as fp: - fp.write(u'hello') - self.assertTrue(s.closed) - - def testProxyNoContextManager(self): - mockStream = MagicMock() - mockStream.__enter__.side_effect = AttributeError() - mockConverter = Mock() - with self.assertRaises(AttributeError) as excinfo: - with StreamWrapper(mockStream, mockConverter) as wrapper: - wrapper.write('hello') - - def test_closed_shouldnt_raise_on_closed_stream(self): - stream = StringIO() - stream.close() - wrapper = StreamWrapper(stream, None) - self.assertEqual(wrapper.closed, True) - - def test_closed_shouldnt_raise_on_detached_stream(self): - stream = TextIOWrapper(StringIO()) - stream.detach() - wrapper = StreamWrapper(stream, None) - self.assertEqual(wrapper.closed, True) - -class AnsiToWin32Test(TestCase): - - def testInit(self): - mockStdout = Mock() - auto = Mock() - stream = AnsiToWin32(mockStdout, autoreset=auto) - self.assertEqual(stream.wrapped, mockStdout) - self.assertEqual(stream.autoreset, auto) - - @patch('colorama.ansitowin32.winterm', None) - @patch('colorama.ansitowin32.winapi_test', lambda *_: True) - def testStripIsTrueOnWindows(self): - with osname('nt'): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - self.assertTrue(stream.strip) - - def testStripIsFalseOffWindows(self): - with osname('posix'): - mockStdout = Mock(closed=False) - stream = AnsiToWin32(mockStdout) - self.assertFalse(stream.strip) - - def testWriteStripsAnsi(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - stream.wrapped = Mock() - stream.write_and_convert = Mock() - stream.strip = True - - stream.write('abc') - - self.assertFalse(stream.wrapped.write.called) - self.assertEqual(stream.write_and_convert.call_args, (('abc',), {})) - - def testWriteDoesNotStripAnsi(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout) - stream.wrapped = Mock() - stream.write_and_convert = Mock() - stream.strip = False - stream.convert = False - - stream.write('abc') - - self.assertFalse(stream.write_and_convert.called) - self.assertEqual(stream.wrapped.write.call_args, (('abc',), {})) - - def assert_autoresets(self, convert, autoreset=True): - stream = AnsiToWin32(Mock()) - stream.convert = convert - stream.reset_all = Mock() - stream.autoreset = autoreset - stream.winterm = Mock() - - stream.write('abc') - - self.assertEqual(stream.reset_all.called, autoreset) - - def testWriteAutoresets(self): - self.assert_autoresets(convert=True) - self.assert_autoresets(convert=False) - self.assert_autoresets(convert=True, autoreset=False) - self.assert_autoresets(convert=False, 
autoreset=False) - - def testWriteAndConvertWritesPlainText(self): - stream = AnsiToWin32(Mock()) - stream.write_and_convert( 'abc' ) - self.assertEqual( stream.wrapped.write.call_args, (('abc',), {}) ) - - def testWriteAndConvertStripsAllValidAnsi(self): - stream = AnsiToWin32(Mock()) - stream.call_win32 = Mock() - data = [ - 'abc\033[mdef', - 'abc\033[0mdef', - 'abc\033[2mdef', - 'abc\033[02mdef', - 'abc\033[002mdef', - 'abc\033[40mdef', - 'abc\033[040mdef', - 'abc\033[0;1mdef', - 'abc\033[40;50mdef', - 'abc\033[50;30;40mdef', - 'abc\033[Adef', - 'abc\033[0Gdef', - 'abc\033[1;20;128Hdef', - ] - for datum in data: - stream.wrapped.write.reset_mock() - stream.write_and_convert( datum ) - self.assertEqual( - [args[0] for args in stream.wrapped.write.call_args_list], - [ ('abc',), ('def',) ] - ) - - def testWriteAndConvertSkipsEmptySnippets(self): - stream = AnsiToWin32(Mock()) - stream.call_win32 = Mock() - stream.write_and_convert( '\033[40m\033[41m' ) - self.assertFalse( stream.wrapped.write.called ) - - def testWriteAndConvertCallsWin32WithParamsAndCommand(self): - stream = AnsiToWin32(Mock()) - stream.convert = True - stream.call_win32 = Mock() - stream.extract_params = Mock(return_value='params') - data = { - 'abc\033[adef': ('a', 'params'), - 'abc\033[;;bdef': ('b', 'params'), - 'abc\033[0cdef': ('c', 'params'), - 'abc\033[;;0;;Gdef': ('G', 'params'), - 'abc\033[1;20;128Hdef': ('H', 'params'), - } - for datum, expected in data.items(): - stream.call_win32.reset_mock() - stream.write_and_convert( datum ) - self.assertEqual( stream.call_win32.call_args[0], expected ) - - def test_reset_all_shouldnt_raise_on_closed_orig_stdout(self): - stream = StringIO() - converter = AnsiToWin32(stream) - stream.close() - - converter.reset_all() - - def test_wrap_shouldnt_raise_on_closed_orig_stdout(self): - stream = StringIO() - stream.close() - with \ - patch("colorama.ansitowin32.os.name", "nt"), \ - patch("colorama.ansitowin32.winapi_test", lambda: True): - converter = AnsiToWin32(stream) - self.assertTrue(converter.strip) - self.assertFalse(converter.convert) - - def test_wrap_shouldnt_raise_on_missing_closed_attr(self): - with \ - patch("colorama.ansitowin32.os.name", "nt"), \ - patch("colorama.ansitowin32.winapi_test", lambda: True): - converter = AnsiToWin32(object()) - self.assertTrue(converter.strip) - self.assertFalse(converter.convert) - - def testExtractParams(self): - stream = AnsiToWin32(Mock()) - data = { - '': (0,), - ';;': (0,), - '2': (2,), - ';;002;;': (2,), - '0;1': (0, 1), - ';;003;;456;;': (3, 456), - '11;22;33;44;55': (11, 22, 33, 44, 55), - } - for datum, expected in data.items(): - self.assertEqual(stream.extract_params('m', datum), expected) - - def testCallWin32UsesLookup(self): - listener = Mock() - stream = AnsiToWin32(listener) - stream.win32_calls = { - 1: (lambda *_, **__: listener(11),), - 2: (lambda *_, **__: listener(22),), - 3: (lambda *_, **__: listener(33),), - } - stream.call_win32('m', (3, 1, 99, 2)) - self.assertEqual( - [a[0][0] for a in listener.call_args_list], - [33, 11, 22] ) - - def test_osc_codes(self): - mockStdout = Mock() - stream = AnsiToWin32(mockStdout, convert=True) - with patch('colorama.ansitowin32.winterm') as winterm: - data = [ - '\033]0\x07', # missing arguments - '\033]0;foo\x08', # wrong OSC command - '\033]0;colorama_test_title\x07', # should work - '\033]1;colorama_test_title\x07', # wrong set command - '\033]2;colorama_test_title\x07', # should work - '\033]' + ';' * 64 + '\x08', # see issue #247 - ] - for code in data: - 
stream.write(code) - self.assertEqual(winterm.set_title.call_count, 2) - - def test_native_windows_ansi(self): - with ExitStack() as stack: - def p(a, b): - stack.enter_context(patch(a, b, create=True)) - # Pretend to be on Windows - p("colorama.ansitowin32.os.name", "nt") - p("colorama.ansitowin32.winapi_test", lambda: True) - p("colorama.win32.winapi_test", lambda: True) - p("colorama.winterm.win32.windll", "non-None") - p("colorama.winterm.get_osfhandle", lambda _: 1234) - - # Pretend that our mock stream has native ANSI support - p( - "colorama.winterm.win32.GetConsoleMode", - lambda _: ENABLE_VIRTUAL_TERMINAL_PROCESSING, - ) - SetConsoleMode = Mock() - p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) - - stdout = Mock() - stdout.closed = False - stdout.isatty.return_value = True - stdout.fileno.return_value = 1 - - # Our fake console says it has native vt support, so AnsiToWin32 should - # enable that support and do nothing else. - stream = AnsiToWin32(stdout) - SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) - self.assertFalse(stream.strip) - self.assertFalse(stream.convert) - self.assertFalse(stream.should_wrap()) - - # Now let's pretend we're on an old Windows console, that doesn't have - # native ANSI support. - p("colorama.winterm.win32.GetConsoleMode", lambda _: 0) - SetConsoleMode = Mock() - p("colorama.winterm.win32.SetConsoleMode", SetConsoleMode) - - stream = AnsiToWin32(stdout) - SetConsoleMode.assert_called_with(1234, ENABLE_VIRTUAL_TERMINAL_PROCESSING) - self.assertTrue(stream.strip) - self.assertTrue(stream.convert) - self.assertTrue(stream.should_wrap()) - - -if __name__ == '__main__': - main() diff --git a/myenv/Lib/site-packages/colorama/tests/initialise_test.py b/myenv/Lib/site-packages/colorama/tests/initialise_test.py deleted file mode 100644 index 89f9b07..0000000 --- a/myenv/Lib/site-packages/colorama/tests/initialise_test.py +++ /dev/null @@ -1,189 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-import sys -from unittest import TestCase, main, skipUnless - -try: - from unittest.mock import patch, Mock -except ImportError: - from mock import patch, Mock - -from ..ansitowin32 import StreamWrapper -from ..initialise import init, just_fix_windows_console, _wipe_internal_state_for_tests -from .utils import osname, replace_by - -orig_stdout = sys.stdout -orig_stderr = sys.stderr - - -class InitTest(TestCase): - - @skipUnless(sys.stdout.isatty(), "sys.stdout is not a tty") - def setUp(self): - # sanity check - self.assertNotWrapped() - - def tearDown(self): - _wipe_internal_state_for_tests() - sys.stdout = orig_stdout - sys.stderr = orig_stderr - - def assertWrapped(self): - self.assertIsNot(sys.stdout, orig_stdout, 'stdout should be wrapped') - self.assertIsNot(sys.stderr, orig_stderr, 'stderr should be wrapped') - self.assertTrue(isinstance(sys.stdout, StreamWrapper), - 'bad stdout wrapper') - self.assertTrue(isinstance(sys.stderr, StreamWrapper), - 'bad stderr wrapper') - - def assertNotWrapped(self): - self.assertIs(sys.stdout, orig_stdout, 'stdout should not be wrapped') - self.assertIs(sys.stderr, orig_stderr, 'stderr should not be wrapped') - - @patch('colorama.initialise.reset_all') - @patch('colorama.ansitowin32.winapi_test', lambda *_: True) - @patch('colorama.ansitowin32.enable_vt_processing', lambda *_: False) - def testInitWrapsOnWindows(self, _): - with osname("nt"): - init() - self.assertWrapped() - - @patch('colorama.initialise.reset_all') - @patch('colorama.ansitowin32.winapi_test', lambda *_: False) - def testInitDoesntWrapOnEmulatedWindows(self, _): - with osname("nt"): - init() - self.assertNotWrapped() - - def testInitDoesntWrapOnNonWindows(self): - with osname("posix"): - init() - self.assertNotWrapped() - - def testInitDoesntWrapIfNone(self): - with replace_by(None): - init() - # We can't use assertNotWrapped here because replace_by(None) - # changes stdout/stderr already. 
- self.assertIsNone(sys.stdout) - self.assertIsNone(sys.stderr) - - def testInitAutoresetOnWrapsOnAllPlatforms(self): - with osname("posix"): - init(autoreset=True) - self.assertWrapped() - - def testInitWrapOffDoesntWrapOnWindows(self): - with osname("nt"): - init(wrap=False) - self.assertNotWrapped() - - def testInitWrapOffIncompatibleWithAutoresetOn(self): - self.assertRaises(ValueError, lambda: init(autoreset=True, wrap=False)) - - @patch('colorama.win32.SetConsoleTextAttribute') - @patch('colorama.initialise.AnsiToWin32') - def testAutoResetPassedOn(self, mockATW32, _): - with osname("nt"): - init(autoreset=True) - self.assertEqual(len(mockATW32.call_args_list), 2) - self.assertEqual(mockATW32.call_args_list[1][1]['autoreset'], True) - self.assertEqual(mockATW32.call_args_list[0][1]['autoreset'], True) - - @patch('colorama.initialise.AnsiToWin32') - def testAutoResetChangeable(self, mockATW32): - with osname("nt"): - init() - - init(autoreset=True) - self.assertEqual(len(mockATW32.call_args_list), 4) - self.assertEqual(mockATW32.call_args_list[2][1]['autoreset'], True) - self.assertEqual(mockATW32.call_args_list[3][1]['autoreset'], True) - - init() - self.assertEqual(len(mockATW32.call_args_list), 6) - self.assertEqual( - mockATW32.call_args_list[4][1]['autoreset'], False) - self.assertEqual( - mockATW32.call_args_list[5][1]['autoreset'], False) - - - @patch('colorama.initialise.atexit.register') - def testAtexitRegisteredOnlyOnce(self, mockRegister): - init() - self.assertTrue(mockRegister.called) - mockRegister.reset_mock() - init() - self.assertFalse(mockRegister.called) - - -class JustFixWindowsConsoleTest(TestCase): - def _reset(self): - _wipe_internal_state_for_tests() - sys.stdout = orig_stdout - sys.stderr = orig_stderr - - def tearDown(self): - self._reset() - - @patch("colorama.ansitowin32.winapi_test", lambda: True) - def testJustFixWindowsConsole(self): - if sys.platform != "win32": - # just_fix_windows_console should be a no-op - just_fix_windows_console() - self.assertIs(sys.stdout, orig_stdout) - self.assertIs(sys.stderr, orig_stderr) - else: - def fake_std(): - # Emulate stdout=not a tty, stderr=tty - # to check that we handle both cases correctly - stdout = Mock() - stdout.closed = False - stdout.isatty.return_value = False - stdout.fileno.return_value = 1 - sys.stdout = stdout - - stderr = Mock() - stderr.closed = False - stderr.isatty.return_value = True - stderr.fileno.return_value = 2 - sys.stderr = stderr - - for native_ansi in [False, True]: - with patch( - 'colorama.ansitowin32.enable_vt_processing', - lambda *_: native_ansi - ): - self._reset() - fake_std() - - # Regular single-call test - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(sys.stdout, prev_stdout) - if native_ansi: - self.assertIs(sys.stderr, prev_stderr) - else: - self.assertIsNot(sys.stderr, prev_stderr) - - # second call without resetting is always a no-op - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(sys.stdout, prev_stdout) - self.assertIs(sys.stderr, prev_stderr) - - self._reset() - fake_std() - - # If init() runs first, just_fix_windows_console should be a no-op - init() - prev_stdout = sys.stdout - prev_stderr = sys.stderr - just_fix_windows_console() - self.assertIs(prev_stdout, sys.stdout) - self.assertIs(prev_stderr, sys.stderr) - - -if __name__ == '__main__': - main() diff --git a/myenv/Lib/site-packages/colorama/tests/isatty_test.py 
b/myenv/Lib/site-packages/colorama/tests/isatty_test.py deleted file mode 100644 index 0f84e4b..0000000 --- a/myenv/Lib/site-packages/colorama/tests/isatty_test.py +++ /dev/null @@ -1,57 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -import sys -from unittest import TestCase, main - -from ..ansitowin32 import StreamWrapper, AnsiToWin32 -from .utils import pycharm, replace_by, replace_original_by, StreamTTY, StreamNonTTY - - -def is_a_tty(stream): - return StreamWrapper(stream, None).isatty() - -class IsattyTest(TestCase): - - def test_TTY(self): - tty = StreamTTY() - self.assertTrue(is_a_tty(tty)) - with pycharm(): - self.assertTrue(is_a_tty(tty)) - - def test_nonTTY(self): - non_tty = StreamNonTTY() - self.assertFalse(is_a_tty(non_tty)) - with pycharm(): - self.assertFalse(is_a_tty(non_tty)) - - def test_withPycharm(self): - with pycharm(): - self.assertTrue(is_a_tty(sys.stderr)) - self.assertTrue(is_a_tty(sys.stdout)) - - def test_withPycharmTTYOverride(self): - tty = StreamTTY() - with pycharm(), replace_by(tty): - self.assertTrue(is_a_tty(tty)) - - def test_withPycharmNonTTYOverride(self): - non_tty = StreamNonTTY() - with pycharm(), replace_by(non_tty): - self.assertFalse(is_a_tty(non_tty)) - - def test_withPycharmNoneOverride(self): - with pycharm(): - with replace_by(None), replace_original_by(None): - self.assertFalse(is_a_tty(None)) - self.assertFalse(is_a_tty(StreamNonTTY())) - self.assertTrue(is_a_tty(StreamTTY())) - - def test_withPycharmStreamWrapped(self): - with pycharm(): - self.assertTrue(AnsiToWin32(StreamTTY()).stream.isatty()) - self.assertFalse(AnsiToWin32(StreamNonTTY()).stream.isatty()) - self.assertTrue(AnsiToWin32(sys.stdout).stream.isatty()) - self.assertTrue(AnsiToWin32(sys.stderr).stream.isatty()) - - -if __name__ == '__main__': - main() diff --git a/myenv/Lib/site-packages/colorama/tests/utils.py b/myenv/Lib/site-packages/colorama/tests/utils.py deleted file mode 100644 index 472fafb..0000000 --- a/myenv/Lib/site-packages/colorama/tests/utils.py +++ /dev/null @@ -1,49 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -from contextlib import contextmanager -from io import StringIO -import sys -import os - - -class StreamTTY(StringIO): - def isatty(self): - return True - -class StreamNonTTY(StringIO): - def isatty(self): - return False - -@contextmanager -def osname(name): - orig = os.name - os.name = name - yield - os.name = orig - -@contextmanager -def replace_by(stream): - orig_stdout = sys.stdout - orig_stderr = sys.stderr - sys.stdout = stream - sys.stderr = stream - yield - sys.stdout = orig_stdout - sys.stderr = orig_stderr - -@contextmanager -def replace_original_by(stream): - orig_stdout = sys.__stdout__ - orig_stderr = sys.__stderr__ - sys.__stdout__ = stream - sys.__stderr__ = stream - yield - sys.__stdout__ = orig_stdout - sys.__stderr__ = orig_stderr - -@contextmanager -def pycharm(): - os.environ["PYCHARM_HOSTED"] = "1" - non_tty = StreamNonTTY() - with replace_by(non_tty), replace_original_by(non_tty): - yield - del os.environ["PYCHARM_HOSTED"] diff --git a/myenv/Lib/site-packages/colorama/tests/winterm_test.py b/myenv/Lib/site-packages/colorama/tests/winterm_test.py deleted file mode 100644 index d0955f9..0000000 --- a/myenv/Lib/site-packages/colorama/tests/winterm_test.py +++ /dev/null @@ -1,131 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
-import sys -from unittest import TestCase, main, skipUnless - -try: - from unittest.mock import Mock, patch -except ImportError: - from mock import Mock, patch - -from ..winterm import WinColor, WinStyle, WinTerm - - -class WinTermTest(TestCase): - - @patch('colorama.winterm.win32') - def testInit(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 7 + 6 * 16 + 8 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - self.assertEqual(term._fore, 7) - self.assertEqual(term._back, 6) - self.assertEqual(term._style, 8) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testGetAttrs(self): - term = WinTerm() - - term._fore = 0 - term._back = 0 - term._style = 0 - self.assertEqual(term.get_attrs(), 0) - - term._fore = WinColor.YELLOW - self.assertEqual(term.get_attrs(), WinColor.YELLOW) - - term._back = WinColor.MAGENTA - self.assertEqual( - term.get_attrs(), - WinColor.YELLOW + WinColor.MAGENTA * 16) - - term._style = WinStyle.BRIGHT - self.assertEqual( - term.get_attrs(), - WinColor.YELLOW + WinColor.MAGENTA * 16 + WinStyle.BRIGHT) - - @patch('colorama.winterm.win32') - def testResetAll(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 1 + 2 * 16 + 8 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - - term.set_console = Mock() - term._fore = -1 - term._back = -1 - term._style = -1 - - term.reset_all() - - self.assertEqual(term._fore, 1) - self.assertEqual(term._back, 2) - self.assertEqual(term._style, 8) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testFore(self): - term = WinTerm() - term.set_console = Mock() - term._fore = 0 - - term.fore(5) - - self.assertEqual(term._fore, 5) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testBack(self): - term = WinTerm() - term.set_console = Mock() - term._back = 0 - - term.back(5) - - self.assertEqual(term._back, 5) - self.assertEqual(term.set_console.called, True) - - @skipUnless(sys.platform.startswith("win"), "requires Windows") - def testStyle(self): - term = WinTerm() - term.set_console = Mock() - term._style = 0 - - term.style(22) - - self.assertEqual(term._style, 22) - self.assertEqual(term.set_console.called, True) - - @patch('colorama.winterm.win32') - def testSetConsole(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 0 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - term.windll = Mock() - - term.set_console() - - self.assertEqual( - mockWin32.SetConsoleTextAttribute.call_args, - ((mockWin32.STDOUT, term.get_attrs()), {}) - ) - - @patch('colorama.winterm.win32') - def testSetConsoleOnStderr(self, mockWin32): - mockAttr = Mock() - mockAttr.wAttributes = 0 - mockWin32.GetConsoleScreenBufferInfo.return_value = mockAttr - term = WinTerm() - term.windll = Mock() - - term.set_console(on_stderr=True) - - self.assertEqual( - mockWin32.SetConsoleTextAttribute.call_args, - ((mockWin32.STDERR, term.get_attrs()), {}) - ) - - -if __name__ == '__main__': - main() diff --git a/myenv/Lib/site-packages/colorama/win32.py b/myenv/Lib/site-packages/colorama/win32.py deleted file mode 100644 index 841b0e2..0000000 --- a/myenv/Lib/site-packages/colorama/win32.py +++ /dev/null @@ -1,180 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. 
- -# from winbase.h -STDOUT = -11 -STDERR = -12 - -ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x0004 - -try: - import ctypes - from ctypes import LibraryLoader - windll = LibraryLoader(ctypes.WinDLL) - from ctypes import wintypes -except (AttributeError, ImportError): - windll = None - SetConsoleTextAttribute = lambda *_: None - winapi_test = lambda *_: None -else: - from ctypes import byref, Structure, c_char, POINTER - - COORD = wintypes._COORD - - class CONSOLE_SCREEN_BUFFER_INFO(Structure): - """struct in wincon.h.""" - _fields_ = [ - ("dwSize", COORD), - ("dwCursorPosition", COORD), - ("wAttributes", wintypes.WORD), - ("srWindow", wintypes.SMALL_RECT), - ("dwMaximumWindowSize", COORD), - ] - def __str__(self): - return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % ( - self.dwSize.Y, self.dwSize.X - , self.dwCursorPosition.Y, self.dwCursorPosition.X - , self.wAttributes - , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right - , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X - ) - - _GetStdHandle = windll.kernel32.GetStdHandle - _GetStdHandle.argtypes = [ - wintypes.DWORD, - ] - _GetStdHandle.restype = wintypes.HANDLE - - _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo - _GetConsoleScreenBufferInfo.argtypes = [ - wintypes.HANDLE, - POINTER(CONSOLE_SCREEN_BUFFER_INFO), - ] - _GetConsoleScreenBufferInfo.restype = wintypes.BOOL - - _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute - _SetConsoleTextAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - ] - _SetConsoleTextAttribute.restype = wintypes.BOOL - - _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition - _SetConsoleCursorPosition.argtypes = [ - wintypes.HANDLE, - COORD, - ] - _SetConsoleCursorPosition.restype = wintypes.BOOL - - _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA - _FillConsoleOutputCharacterA.argtypes = [ - wintypes.HANDLE, - c_char, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputCharacterA.restype = wintypes.BOOL - - _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute - _FillConsoleOutputAttribute.argtypes = [ - wintypes.HANDLE, - wintypes.WORD, - wintypes.DWORD, - COORD, - POINTER(wintypes.DWORD), - ] - _FillConsoleOutputAttribute.restype = wintypes.BOOL - - _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW - _SetConsoleTitleW.argtypes = [ - wintypes.LPCWSTR - ] - _SetConsoleTitleW.restype = wintypes.BOOL - - _GetConsoleMode = windll.kernel32.GetConsoleMode - _GetConsoleMode.argtypes = [ - wintypes.HANDLE, - POINTER(wintypes.DWORD) - ] - _GetConsoleMode.restype = wintypes.BOOL - - _SetConsoleMode = windll.kernel32.SetConsoleMode - _SetConsoleMode.argtypes = [ - wintypes.HANDLE, - wintypes.DWORD - ] - _SetConsoleMode.restype = wintypes.BOOL - - def _winapi_test(handle): - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return bool(success) - - def winapi_test(): - return any(_winapi_test(h) for h in - (_GetStdHandle(STDOUT), _GetStdHandle(STDERR))) - - def GetConsoleScreenBufferInfo(stream_id=STDOUT): - handle = _GetStdHandle(stream_id) - csbi = CONSOLE_SCREEN_BUFFER_INFO() - success = _GetConsoleScreenBufferInfo( - handle, byref(csbi)) - return csbi - - def SetConsoleTextAttribute(stream_id, attrs): - handle = _GetStdHandle(stream_id) - return _SetConsoleTextAttribute(handle, attrs) - - def SetConsoleCursorPosition(stream_id, position, adjust=True): - position = COORD(*position) - # If the 
position is out of range, do nothing. - if position.Y <= 0 or position.X <= 0: - return - # Adjust for Windows' SetConsoleCursorPosition: - # 1. being 0-based, while ANSI is 1-based. - # 2. expecting (x,y), while ANSI uses (y,x). - adjusted_position = COORD(position.Y - 1, position.X - 1) - if adjust: - # Adjust for viewport's scroll position - sr = GetConsoleScreenBufferInfo(STDOUT).srWindow - adjusted_position.Y += sr.Top - adjusted_position.X += sr.Left - # Resume normal processing - handle = _GetStdHandle(stream_id) - return _SetConsoleCursorPosition(handle, adjusted_position) - - def FillConsoleOutputCharacter(stream_id, char, length, start): - handle = _GetStdHandle(stream_id) - char = c_char(char.encode()) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - success = _FillConsoleOutputCharacterA( - handle, char, length, start, byref(num_written)) - return num_written.value - - def FillConsoleOutputAttribute(stream_id, attr, length, start): - ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )''' - handle = _GetStdHandle(stream_id) - attribute = wintypes.WORD(attr) - length = wintypes.DWORD(length) - num_written = wintypes.DWORD(0) - # Note that this is hard-coded for ANSI (vs wide) bytes. - return _FillConsoleOutputAttribute( - handle, attribute, length, start, byref(num_written)) - - def SetConsoleTitle(title): - return _SetConsoleTitleW(title) - - def GetConsoleMode(handle): - mode = wintypes.DWORD() - success = _GetConsoleMode(handle, byref(mode)) - if not success: - raise ctypes.WinError() - return mode.value - - def SetConsoleMode(handle, mode): - success = _SetConsoleMode(handle, mode) - if not success: - raise ctypes.WinError() diff --git a/myenv/Lib/site-packages/colorama/winterm.py b/myenv/Lib/site-packages/colorama/winterm.py deleted file mode 100644 index aad867e..0000000 --- a/myenv/Lib/site-packages/colorama/winterm.py +++ /dev/null @@ -1,195 +0,0 @@ -# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file. -try: - from msvcrt import get_osfhandle -except ImportError: - def get_osfhandle(_): - raise OSError("This isn't windows!") - - -from . import win32 - -# from wincon.h -class WinColor(object): - BLACK = 0 - BLUE = 1 - GREEN = 2 - CYAN = 3 - RED = 4 - MAGENTA = 5 - YELLOW = 6 - GREY = 7 - -# from wincon.h -class WinStyle(object): - NORMAL = 0x00 # dim text, dim background - BRIGHT = 0x08 # bright text, dim background - BRIGHT_BACKGROUND = 0x80 # dim text, bright background - -class WinTerm(object): - - def __init__(self): - self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes - self.set_attrs(self._default) - self._default_fore = self._fore - self._default_back = self._back - self._default_style = self._style - # In order to emulate LIGHT_EX in windows, we borrow the BRIGHT style. - # So that LIGHT_EX colors and BRIGHT style do not clobber each other, - # we track them separately, since LIGHT_EX is overwritten by Fore/Back - # and BRIGHT is overwritten by Style codes. 
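The `SetConsoleCursorPosition` wrapper above reconciles three conventions at once: ANSI positions are 1-based and ordered (row, column), Win32 `COORD`s are 0-based and ordered (X, Y), and buffer coordinates must account for the viewport's scroll offset (`srWindow.Top`/`.Left`). A hedged sketch of just that arithmetic (the helper name is illustrative):

```python
def ansi_to_win32(row: int, col: int, window_top: int = 0, window_left: int = 0):
    # ANSI (row, col) is 1-based; Win32 COORD is 0-based and (X, Y),
    # relative to the whole screen buffer rather than the visible window.
    x = col - 1 + window_left
    y = row - 1 + window_top
    return x, y

# Cursor "home" (ANSI 1;1) while the visible window has scrolled 30 lines:
print(ansi_to_win32(1, 1, window_top=30))  # -> (0, 30)
```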
- self._light = 0 - - def get_attrs(self): - return self._fore + self._back * 16 + (self._style | self._light) - - def set_attrs(self, value): - self._fore = value & 7 - self._back = (value >> 4) & 7 - self._style = value & (WinStyle.BRIGHT | WinStyle.BRIGHT_BACKGROUND) - - def reset_all(self, on_stderr=None): - self.set_attrs(self._default) - self.set_console(attrs=self._default) - self._light = 0 - - def fore(self, fore=None, light=False, on_stderr=False): - if fore is None: - fore = self._default_fore - self._fore = fore - # Emulate LIGHT_EX with BRIGHT Style - if light: - self._light |= WinStyle.BRIGHT - else: - self._light &= ~WinStyle.BRIGHT - self.set_console(on_stderr=on_stderr) - - def back(self, back=None, light=False, on_stderr=False): - if back is None: - back = self._default_back - self._back = back - # Emulate LIGHT_EX with BRIGHT_BACKGROUND Style - if light: - self._light |= WinStyle.BRIGHT_BACKGROUND - else: - self._light &= ~WinStyle.BRIGHT_BACKGROUND - self.set_console(on_stderr=on_stderr) - - def style(self, style=None, on_stderr=False): - if style is None: - style = self._default_style - self._style = style - self.set_console(on_stderr=on_stderr) - - def set_console(self, attrs=None, on_stderr=False): - if attrs is None: - attrs = self.get_attrs() - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleTextAttribute(handle, attrs) - - def get_position(self, handle): - position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition - # Because Windows coordinates are 0-based, - # and win32.SetConsoleCursorPosition expects 1-based. - position.X += 1 - position.Y += 1 - return position - - def set_cursor_position(self, position=None, on_stderr=False): - if position is None: - # I'm not currently tracking the position, so there is no default. - # position = self.get_position() - return - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - win32.SetConsoleCursorPosition(handle, position) - - def cursor_adjust(self, x, y, on_stderr=False): - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - position = self.get_position(handle) - adjusted_position = (position.Y + y, position.X + x) - win32.SetConsoleCursorPosition(handle, adjusted_position, adjust=False) - - def erase_screen(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the screen. - # 1 should clear from the cursor to the beginning of the screen. 
- # 2 should clear the entire screen, and move cursor to (1,1) - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - # get the number of character cells in the current buffer - cells_in_screen = csbi.dwSize.X * csbi.dwSize.Y - # get number of character cells before current cursor position - cells_before_cursor = csbi.dwSize.X * csbi.dwCursorPosition.Y + csbi.dwCursorPosition.X - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = cells_in_screen - cells_before_cursor - elif mode == 1: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_before_cursor - elif mode == 2: - from_coord = win32.COORD(0, 0) - cells_to_erase = cells_in_screen - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - if mode == 2: - # put the cursor where needed - win32.SetConsoleCursorPosition(handle, (1, 1)) - - def erase_line(self, mode=0, on_stderr=False): - # 0 should clear from the cursor to the end of the line. - # 1 should clear from the cursor to the beginning of the line. - # 2 should clear the entire line. - handle = win32.STDOUT - if on_stderr: - handle = win32.STDERR - csbi = win32.GetConsoleScreenBufferInfo(handle) - if mode == 0: - from_coord = csbi.dwCursorPosition - cells_to_erase = csbi.dwSize.X - csbi.dwCursorPosition.X - elif mode == 1: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwCursorPosition.X - elif mode == 2: - from_coord = win32.COORD(0, csbi.dwCursorPosition.Y) - cells_to_erase = csbi.dwSize.X - else: - # invalid mode - return - # fill the entire screen with blanks - win32.FillConsoleOutputCharacter(handle, ' ', cells_to_erase, from_coord) - # now set the buffer's attributes accordingly - win32.FillConsoleOutputAttribute(handle, self.get_attrs(), cells_to_erase, from_coord) - - def set_title(self, title): - win32.SetConsoleTitle(title) - - -def enable_vt_processing(fd): - if win32.windll is None or not win32.winapi_test(): - return False - - try: - handle = get_osfhandle(fd) - mode = win32.GetConsoleMode(handle) - win32.SetConsoleMode( - handle, - mode | win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING, - ) - - mode = win32.GetConsoleMode(handle) - if mode & win32.ENABLE_VIRTUAL_TERMINAL_PROCESSING: - return True - # Can get TypeError in testsuite where 'fd' is a Mock() - except (OSError, TypeError): - return False diff --git a/myenv/Lib/site-packages/distro-1.9.0.dist-info/INSTALLER b/myenv/Lib/site-packages/distro-1.9.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/distro-1.9.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/distro-1.9.0.dist-info/LICENSE b/myenv/Lib/site-packages/distro-1.9.0.dist-info/LICENSE deleted file mode 100644 index e06d208..0000000 --- a/myenv/Lib/site-packages/distro-1.9.0.dist-info/LICENSE +++ /dev/null @@ -1,202 +0,0 @@ -Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. 
- - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
- diff --git a/myenv/Lib/site-packages/distro-1.9.0.dist-info/METADATA b/myenv/Lib/site-packages/distro-1.9.0.dist-info/METADATA deleted file mode 100644 index 9312e8e..0000000 --- a/myenv/Lib/site-packages/distro-1.9.0.dist-info/METADATA +++ /dev/null @@ -1,184 +0,0 @@ -Metadata-Version: 2.1 -Name: distro -Version: 1.9.0 -Summary: Distro - an OS platform information API -Home-page: https://github.com/python-distro/distro -Author: Nir Cohen -Author-email: nir36g@gmail.com -License: Apache License, Version 2.0 -Platform: All -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: POSIX :: Linux -Classifier: Operating System :: POSIX :: BSD -Classifier: Operating System :: POSIX :: BSD :: FreeBSD -Classifier: Operating System :: POSIX :: BSD :: NetBSD -Classifier: Operating System :: POSIX :: BSD :: OpenBSD -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Operating System -Requires-Python: >=3.6 -Description-Content-Type: text/markdown -License-File: LICENSE - -Distro - an OS platform information API -======================================= - -[![CI Status](https://github.com/python-distro/distro/workflows/CI/badge.svg)](https://github.com/python-distro/distro/actions/workflows/ci.yaml) -[![PyPI version](http://img.shields.io/pypi/v/distro.svg)](https://pypi.python.org/pypi/distro) -[![Supported Python Versions](https://img.shields.io/pypi/pyversions/distro.svg)](https://img.shields.io/pypi/pyversions/distro.svg) -[![Code Coverage](https://codecov.io/github/python-distro/distro/coverage.svg?branch=master)](https://codecov.io/github/python-distro/distro?branch=master) -[![Is Wheel](https://img.shields.io/pypi/wheel/distro.svg?style=flat)](https://pypi.python.org/pypi/distro) -[![Latest Github Release](https://readthedocs.org/projects/distro/badge/?version=stable)](http://distro.readthedocs.io/en/latest/) - -`distro` provides information about the -OS distribution it runs on, such as a reliable machine-readable ID, or -version information. - -It is the recommended replacement for Python's original -[`platform.linux_distribution`](https://docs.python.org/3.7/library/platform.html#platform.linux_distribution) -function (removed in Python 3.8). It also provides much more functionality -which isn't necessarily Python bound, like a command-line interface. - -Distro currently supports Linux and BSD based systems but [Windows and OS X support](https://github.com/python-distro/distro/issues/177) is also planned. 
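For readers migrating code, a short hedged sketch of the replacement path described above: `distro.linux_distribution()` is only a compatibility shim for the removed `platform.linux_distribution()` (it emits a `DeprecationWarning`), so the individual accessors are preferred.

```python
import distro

# Deprecated shim kept for platform.linux_distribution() compatibility:
name, version, codename = distro.linux_distribution(full_distribution_name=False)

# Preferred accessors:
print(distro.id(), distro.version(best=True), distro.codename())
```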
- -For Python 2.6 support, see https://github.com/python-distro/distro/tree/python2.6-support - -## Installation - -Installation of the latest released version from PyPI: - -```shell -pip install distro -``` - -Installation of the latest development version: - -```shell -pip install https://github.com/python-distro/distro/archive/master.tar.gz -``` - -To use as a standalone script, download `distro.py` directly: - -```shell -curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py -python distro.py -``` - -``distro`` is safe to vendor within projects that do not wish to add -dependencies. - -```shell -cd myproject -curl -O https://raw.githubusercontent.com/python-distro/distro/master/src/distro/distro.py -``` - -## Usage - -```bash -$ distro -Name: Antergos Linux -Version: 2015.10 (ISO-Rolling) -Codename: ISO-Rolling - -$ distro -j -{ - "codename": "ISO-Rolling", - "id": "antergos", - "like": "arch", - "version": "16.9", - "version_parts": { - "build_number": "", - "major": "16", - "minor": "9" - } -} - - -$ python ->>> import distro ->>> distro.name(pretty=True) -'CentOS Linux 8' ->>> distro.id() -'centos' ->>> distro.version(best=True) -'8.4.2105' -``` - - -## Documentation - -On top of the aforementioned API, several more functions are available. For a complete description of the -API, see the [latest API documentation](http://distro.readthedocs.org/en/latest/). - -## Background - -An alternative implementation became necessary because Python 3.5 deprecated -this function, and Python 3.8 removed it altogether. Its predecessor function -[`platform.dist`](https://docs.python.org/3.7/library/platform.html#platform.dist) -was already deprecated since Python 2.6 and removed in Python 3.8. Still, there -are many cases in which access to that information is needed. See [Python issue -1322](https://bugs.python.org/issue1322) for more information. - -The `distro` package implements a robust and inclusive way of retrieving the -information about a distribution based on new standards and old methods, -namely from these data sources (from high to low precedence): - -* The os-release file `/etc/os-release` if present, with a fall-back on `/usr/lib/os-release` if needed. -* The output of the `lsb_release` command, if available. -* The distro release file (`/etc/*(-|_)(release|version)`), if present. -* The `uname` command for BSD based distrubtions. - - -## Python and Distribution Support - -`distro` is supported and tested on Python 3.6+ and PyPy and on any -distribution that provides one or more of the data sources covered. - -This package is tested with test data that mimics the exact behavior of the data sources of [a number of Linux distributions](https://github.com/python-distro/distro/tree/master/tests/resources/distros). - - -## Testing - -```shell -git clone git@github.com:python-distro/distro.git -cd distro -pip install tox -tox -``` - - -## Contributions - -Pull requests are always welcome to deal with specific distributions or just -for general merriment. - -See [CONTRIBUTIONS](https://github.com/python-distro/distro/blob/master/CONTRIBUTING.md) for contribution info. 
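A hedged example of why the machine-readable ID matters in practice: branching on `distro.id()` and the `distro.like()` chain to pick a package manager. The ID sets here are illustrative, not exhaustive.

```python
import distro

# like() returns a space-separated list of related distro IDs (ID_LIKE).
id_like = set(distro.like().split())

if distro.id() in {"debian", "ubuntu"} or "debian" in id_like:
    installer = "apt-get"
elif distro.id() in {"rhel", "centos", "fedora"} or "rhel" in id_like:
    installer = "dnf"
else:
    installer = "unknown"
print(installer)
```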
- -Reference implementations for supporting additional distributions and file -formats can be found here: - -* https://github.com/saltstack/salt/blob/develop/salt/grains/core.py#L1172 -* https://github.com/chef/ohai/blob/master/lib/ohai/plugins/linux/platform.rb -* https://github.com/ansible/ansible/blob/devel/lib/ansible/module_utils/facts/system/distribution.py -* https://github.com/puppetlabs/facter/blob/master/lib/src/facts/linux/os_linux.cc - -## Package manager distributions - -* https://src.fedoraproject.org/rpms/python-distro -* https://www.archlinux.org/packages/community/any/python-distro/ -* https://launchpad.net/ubuntu/+source/python-distro -* https://packages.debian.org/stable/python3-distro -* https://packages.gentoo.org/packages/dev-python/distro -* https://pkgs.org/download/python3-distro -* https://slackbuilds.org/repository/14.2/python/python-distro/ diff --git a/myenv/Lib/site-packages/distro-1.9.0.dist-info/RECORD b/myenv/Lib/site-packages/distro-1.9.0.dist-info/RECORD deleted file mode 100644 index 7c35649..0000000 --- a/myenv/Lib/site-packages/distro-1.9.0.dist-info/RECORD +++ /dev/null @@ -1,15 +0,0 @@ -../../Scripts/distro.exe,sha256=vHiyw0M2Cb-PBGG8RwU-vZYuVAqAltzovlk-UHxc2aw,108443 -distro-1.9.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -distro-1.9.0.dist-info/LICENSE,sha256=y16Ofl9KOYjhBjwULGDcLfdWBfTEZRXnduOspt-XbhQ,11325 -distro-1.9.0.dist-info/METADATA,sha256=MWMqst5VkRMQkbM5e9zfeXcYV52Fp1GG8Gg53QwJ6B0,6791 -distro-1.9.0.dist-info/RECORD,, -distro-1.9.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92 -distro-1.9.0.dist-info/entry_points.txt,sha256=3ObjqQMbh1xeQQwsWtgbfDNDMDD-EbggR1Oj_z8s9hc,46 -distro-1.9.0.dist-info/top_level.txt,sha256=ikde_V_XEdSBqaGd5tEriN_wzYHLgTX_zVtlsGLHvwQ,7 -distro/__init__.py,sha256=2fHjF-SfgPvjyNZ1iHh_wjqWdR_Yo5ODHwZC0jLBPhc,981 -distro/__main__.py,sha256=bu9d3TifoKciZFcqRBuygV3GSuThnVD_m2IK4cz96Vs,64 -distro/__pycache__/__init__.cpython-312.pyc,, -distro/__pycache__/__main__.cpython-312.pyc,, -distro/__pycache__/distro.cpython-312.pyc,, -distro/distro.py,sha256=XqbefacAhDT4zr_trnbA15eY8vdK4GTghgmvUGrEM_4,49430 -distro/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/Lib/site-packages/distro-1.9.0.dist-info/WHEEL b/myenv/Lib/site-packages/distro-1.9.0.dist-info/WHEEL deleted file mode 100644 index 98c0d20..0000000 --- a/myenv/Lib/site-packages/distro-1.9.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.42.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/distro-1.9.0.dist-info/entry_points.txt b/myenv/Lib/site-packages/distro-1.9.0.dist-info/entry_points.txt deleted file mode 100644 index 08d29c5..0000000 --- a/myenv/Lib/site-packages/distro-1.9.0.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -distro = distro.distro:main diff --git a/myenv/Lib/site-packages/distro-1.9.0.dist-info/top_level.txt b/myenv/Lib/site-packages/distro-1.9.0.dist-info/top_level.txt deleted file mode 100644 index 0e09331..0000000 --- a/myenv/Lib/site-packages/distro-1.9.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -distro diff --git a/myenv/Lib/site-packages/distro/__init__.py b/myenv/Lib/site-packages/distro/__init__.py deleted file mode 100644 index 7686fe8..0000000 --- a/myenv/Lib/site-packages/distro/__init__.py +++ /dev/null @@ -1,54 +0,0 @@ -from .distro import ( - NORMALIZED_DISTRO_ID, - NORMALIZED_LSB_ID, - NORMALIZED_OS_ID, - LinuxDistribution, - 
__version__, - build_number, - codename, - distro_release_attr, - distro_release_info, - id, - info, - like, - linux_distribution, - lsb_release_attr, - lsb_release_info, - major_version, - minor_version, - name, - os_release_attr, - os_release_info, - uname_attr, - uname_info, - version, - version_parts, -) - -__all__ = [ - "NORMALIZED_DISTRO_ID", - "NORMALIZED_LSB_ID", - "NORMALIZED_OS_ID", - "LinuxDistribution", - "build_number", - "codename", - "distro_release_attr", - "distro_release_info", - "id", - "info", - "like", - "linux_distribution", - "lsb_release_attr", - "lsb_release_info", - "major_version", - "minor_version", - "name", - "os_release_attr", - "os_release_info", - "uname_attr", - "uname_info", - "version", - "version_parts", -] - -__version__ = __version__ diff --git a/myenv/Lib/site-packages/distro/__main__.py b/myenv/Lib/site-packages/distro/__main__.py deleted file mode 100644 index 0c01d5b..0000000 --- a/myenv/Lib/site-packages/distro/__main__.py +++ /dev/null @@ -1,4 +0,0 @@ -from .distro import main - -if __name__ == "__main__": - main() diff --git a/myenv/Lib/site-packages/distro/distro.py b/myenv/Lib/site-packages/distro/distro.py deleted file mode 100644 index 78ccdfa..0000000 --- a/myenv/Lib/site-packages/distro/distro.py +++ /dev/null @@ -1,1403 +0,0 @@ -#!/usr/bin/env python -# Copyright 2015-2021 Nir Cohen -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -""" -The ``distro`` package (``distro`` stands for Linux Distribution) provides -information about the Linux distribution it runs on, such as a reliable -machine-readable distro ID, or version information. - -It is the recommended replacement for Python's original -:py:func:`platform.linux_distribution` function, but it provides much more -functionality. An alternative implementation became necessary because Python -3.5 deprecated this function, and Python 3.8 removed it altogether. Its -predecessor function :py:func:`platform.dist` was already deprecated since -Python 2.6 and removed in Python 3.8. Still, there are many cases in which -access to OS distribution information is needed. See `Python issue 1322 -`_ for more information. -""" - -import argparse -import json -import logging -import os -import re -import shlex -import subprocess -import sys -import warnings -from typing import ( - Any, - Callable, - Dict, - Iterable, - Optional, - Sequence, - TextIO, - Tuple, - Type, -) - -try: - from typing import TypedDict -except ImportError: - # Python 3.7 - TypedDict = dict - -__version__ = "1.9.0" - - -class VersionDict(TypedDict): - major: str - minor: str - build_number: str - - -class InfoDict(TypedDict): - id: str - version: str - version_parts: VersionDict - like: str - codename: str - - -_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc") -_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib") -_OS_RELEASE_BASENAME = "os-release" - -#: Translation table for normalizing the "ID" attribute defined in os-release -#: files, for use by the :func:`distro.id` method. 
-#: -#: * Key: Value as defined in the os-release file, translated to lower case, -#: with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_OS_ID = { - "ol": "oracle", # Oracle Linux - "opensuse-leap": "opensuse", # Newer versions of OpenSuSE report as opensuse-leap -} - -#: Translation table for normalizing the "Distributor ID" attribute returned by -#: the lsb_release command, for use by the :func:`distro.id` method. -#: -#: * Key: Value as returned by the lsb_release command, translated to lower -#: case, with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_LSB_ID = { - "enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4 - "enterpriseenterpriseserver": "oracle", # Oracle Linux 5 - "redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation - "redhatenterpriseserver": "rhel", # RHEL 6, 7 Server - "redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode -} - -#: Translation table for normalizing the distro ID derived from the file name -#: of distro release files, for use by the :func:`distro.id` method. -#: -#: * Key: Value as derived from the file name of a distro release file, -#: translated to lower case, with blanks translated to underscores. -#: -#: * Value: Normalized value. -NORMALIZED_DISTRO_ID = { - "redhat": "rhel", # RHEL 6.x, 7.x -} - -# Pattern for content of distro release file (reversed) -_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile( - r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)" -) - -# Pattern for base file name of distro release file -_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$") - -# Base file names to be looked up for if _UNIXCONFDIR is not readable. -_DISTRO_RELEASE_BASENAMES = [ - "SuSE-release", - "altlinux-release", - "arch-release", - "base-release", - "centos-release", - "fedora-release", - "gentoo-release", - "mageia-release", - "mandrake-release", - "mandriva-release", - "mandrivalinux-release", - "manjaro-release", - "oracle-release", - "redhat-release", - "rocky-release", - "sl-release", - "slackware-version", -] - -# Base file names to be ignored when searching for distro release file -_DISTRO_RELEASE_IGNORE_BASENAMES = ( - "debian_version", - "lsb-release", - "oem-release", - _OS_RELEASE_BASENAME, - "system-release", - "plesk-release", - "iredmail-release", - "board-release", - "ec2_version", -) - - -def linux_distribution(full_distribution_name: bool = True) -> Tuple[str, str, str]: - """ - .. deprecated:: 1.6.0 - - :func:`distro.linux_distribution()` is deprecated. It should only be - used as a compatibility shim with Python's - :py:func:`platform.linux_distribution()`. Please use :func:`distro.id`, - :func:`distro.version` and :func:`distro.name` instead. - - Return information about the current OS distribution as a tuple - ``(id_name, version, codename)`` with items as follows: - - * ``id_name``: If *full_distribution_name* is false, the result of - :func:`distro.id`. Otherwise, the result of :func:`distro.name`. - - * ``version``: The result of :func:`distro.version`. - - * ``codename``: The extra item (usually in parentheses) after the - os-release version number, or the result of :func:`distro.codename`. - - The interface of this function is compatible with the original - :py:func:`platform.linux_distribution` function, supporting a subset of - its parameters. 
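One subtlety worth calling out from the constants above: `_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN` matches distro release lines *reversed*, because lines like `CentOS Linux release 7.1.1503 (Core)` are easier to anchor from the right-hand end (hence the literal `esaeler`, i.e. `release` backwards, and `STL` for `LTS`). A standalone sketch of the trick, with the captured groups flipped back; the helper name is illustrative:

```python
import re

# Same pattern as above; it is applied to the *reversed* line.
_REVERSED = re.compile(
    r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
)

def parse_release_line(line: str) -> dict:
    match = _REVERSED.match(line.strip()[::-1])
    if not match:
        return {}
    codename, version, name = match.groups()
    return {
        "name": name[::-1].strip(),
        "version_id": version[::-1],
        "codename": (codename or "")[::-1].strip(),
    }

print(parse_release_line("CentOS Linux release 7.1.1503 (Core)"))
# -> {'name': 'CentOS Linux', 'version_id': '7.1.1503', 'codename': 'Core'}
```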
- - The data it returns may not exactly be the same, because it uses more data - sources than the original function, and that may lead to different data if - the OS distribution is not consistent across multiple data sources it - provides (there are indeed such distributions ...). - - Another reason for differences is the fact that the :func:`distro.id` - method normalizes the distro ID string to a reliable machine-readable value - for a number of popular OS distributions. - """ - warnings.warn( - "distro.linux_distribution() is deprecated. It should only be used as a " - "compatibility shim with Python's platform.linux_distribution(). Please use " - "distro.id(), distro.version() and distro.name() instead.", - DeprecationWarning, - stacklevel=2, - ) - return _distro.linux_distribution(full_distribution_name) - - -def id() -> str: - """ - Return the distro ID of the current distribution, as a - machine-readable string. - - For a number of OS distributions, the returned distro ID value is - *reliable*, in the sense that it is documented and that it does not change - across releases of the distribution. - - This package maintains the following reliable distro ID values: - - ============== ========================================= - Distro ID Distribution - ============== ========================================= - "ubuntu" Ubuntu - "debian" Debian - "rhel" RedHat Enterprise Linux - "centos" CentOS - "fedora" Fedora - "sles" SUSE Linux Enterprise Server - "opensuse" openSUSE - "amzn" Amazon Linux - "arch" Arch Linux - "buildroot" Buildroot - "cloudlinux" CloudLinux OS - "exherbo" Exherbo Linux - "gentoo" GenToo Linux - "ibm_powerkvm" IBM PowerKVM - "kvmibm" KVM for IBM z Systems - "linuxmint" Linux Mint - "mageia" Mageia - "mandriva" Mandriva Linux - "parallels" Parallels - "pidora" Pidora - "raspbian" Raspbian - "oracle" Oracle Linux (and Oracle Enterprise Linux) - "scientific" Scientific Linux - "slackware" Slackware - "xenserver" XenServer - "openbsd" OpenBSD - "netbsd" NetBSD - "freebsd" FreeBSD - "midnightbsd" MidnightBSD - "rocky" Rocky Linux - "aix" AIX - "guix" Guix System - "altlinux" ALT Linux - ============== ========================================= - - If you have a need to get distros for reliable IDs added into this set, - or if you find that the :func:`distro.id` function returns a different - distro ID for one of the listed distros, please create an issue in the - `distro issue tracker`_. - - **Lookup hierarchy and transformations:** - - First, the ID is obtained from the following sources, in the specified - order. The first available and non-empty value is used: - - * the value of the "ID" attribute of the os-release file, - - * the value of the "Distributor ID" attribute returned by the lsb_release - command, - - * the first part of the file name of the distro release file, - - The so determined ID value then passes the following transformations, - before it is returned by this method: - - * it is translated to lower case, - - * blanks (which should not be there anyway) are translated to underscores, - - * a normalization of the ID is performed, based upon - `normalization tables`_. The purpose of this normalization is to ensure - that the ID is as reliable as possible, even across incompatible changes - in the OS distributions. A common reason for an incompatible change is - the addition of an os-release file, or the addition of the lsb_release - command, with ID values that differ from what was previously determined - from the distro release file name. 
- """ - return _distro.id() - - -def name(pretty: bool = False) -> str: - """ - Return the name of the current OS distribution, as a human-readable - string. - - If *pretty* is false, the name is returned without version or codename. - (e.g. "CentOS Linux") - - If *pretty* is true, the version and codename are appended. - (e.g. "CentOS Linux 7.1.1503 (Core)") - - **Lookup hierarchy:** - - The name is obtained from the following sources, in the specified order. - The first available and non-empty value is used: - - * If *pretty* is false: - - - the value of the "NAME" attribute of the os-release file, - - - the value of the "Distributor ID" attribute returned by the lsb_release - command, - - - the value of the "" field of the distro release file. - - * If *pretty* is true: - - - the value of the "PRETTY_NAME" attribute of the os-release file, - - - the value of the "Description" attribute returned by the lsb_release - command, - - - the value of the "" field of the distro release file, appended - with the value of the pretty version ("" and "" - fields) of the distro release file, if available. - """ - return _distro.name(pretty) - - -def version(pretty: bool = False, best: bool = False) -> str: - """ - Return the version of the current OS distribution, as a human-readable - string. - - If *pretty* is false, the version is returned without codename (e.g. - "7.0"). - - If *pretty* is true, the codename in parenthesis is appended, if the - codename is non-empty (e.g. "7.0 (Maipo)"). - - Some distributions provide version numbers with different precisions in - the different sources of distribution information. Examining the different - sources in a fixed priority order does not always yield the most precise - version (e.g. for Debian 8.2, or CentOS 7.1). - - Some other distributions may not provide this kind of information. In these - cases, an empty string would be returned. This behavior can be observed - with rolling releases distributions (e.g. Arch Linux). - - The *best* parameter can be used to control the approach for the returned - version: - - If *best* is false, the first non-empty version number in priority order of - the examined sources is returned. - - If *best* is true, the most precise version number out of all examined - sources is returned. - - **Lookup hierarchy:** - - In all cases, the version number is obtained from the following sources. - If *best* is false, this order represents the priority order: - - * the value of the "VERSION_ID" attribute of the os-release file, - * the value of the "Release" attribute returned by the lsb_release - command, - * the version number parsed from the "" field of the first line - of the distro release file, - * the version number parsed from the "PRETTY_NAME" attribute of the - os-release file, if it follows the format of the distro release files. - * the version number parsed from the "Description" attribute returned by - the lsb_release command, if it follows the format of the distro release - files. - """ - return _distro.version(pretty, best) - - -def version_parts(best: bool = False) -> Tuple[str, str, str]: - """ - Return the version of the current OS distribution as a tuple - ``(major, minor, build_number)`` with items as follows: - - * ``major``: The result of :func:`distro.major_version`. - - * ``minor``: The result of :func:`distro.minor_version`. - - * ``build_number``: The result of :func:`distro.build_number`. - - For a description of the *best* parameter, see the :func:`distro.version` - method. 
- """ - return _distro.version_parts(best) - - -def major_version(best: bool = False) -> str: - """ - Return the major version of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The major version is the first - part of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.major_version(best) - - -def minor_version(best: bool = False) -> str: - """ - Return the minor version of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The minor version is the second - part of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.minor_version(best) - - -def build_number(best: bool = False) -> str: - """ - Return the build number of the current OS distribution, as a string, - if provided. - Otherwise, the empty string is returned. The build number is the third part - of the dot-separated version string. - - For a description of the *best* parameter, see the :func:`distro.version` - method. - """ - return _distro.build_number(best) - - -def like() -> str: - """ - Return a space-separated list of distro IDs of distributions that are - closely related to the current OS distribution in regards to packaging - and programming interfaces, for example distributions the current - distribution is a derivative from. - - **Lookup hierarchy:** - - This information item is only provided by the os-release file. - For details, see the description of the "ID_LIKE" attribute in the - `os-release man page - `_. - """ - return _distro.like() - - -def codename() -> str: - """ - Return the codename for the release of the current OS distribution, - as a string. - - If the distribution does not have a codename, an empty string is returned. - - Note that the returned codename is not always really a codename. For - example, openSUSE returns "x86_64". This function does not handle such - cases in any special way and just returns the string it finds, if any. - - **Lookup hierarchy:** - - * the codename within the "VERSION" attribute of the os-release file, if - provided, - - * the value of the "Codename" attribute returned by the lsb_release - command, - - * the value of the "" field of the distro release file. - """ - return _distro.codename() - - -def info(pretty: bool = False, best: bool = False) -> InfoDict: - """ - Return certain machine-readable information items about the current OS - distribution in a dictionary, as shown in the following example: - - .. sourcecode:: python - - { - 'id': 'rhel', - 'version': '7.0', - 'version_parts': { - 'major': '7', - 'minor': '0', - 'build_number': '' - }, - 'like': 'fedora', - 'codename': 'Maipo' - } - - The dictionary structure and keys are always the same, regardless of which - information items are available in the underlying data sources. The values - for the various keys are as follows: - - * ``id``: The result of :func:`distro.id`. - - * ``version``: The result of :func:`distro.version`. - - * ``version_parts -> major``: The result of :func:`distro.major_version`. - - * ``version_parts -> minor``: The result of :func:`distro.minor_version`. - - * ``version_parts -> build_number``: The result of - :func:`distro.build_number`. - - * ``like``: The result of :func:`distro.like`. - - * ``codename``: The result of :func:`distro.codename`. 
- - For a description of the *pretty* and *best* parameters, see the - :func:`distro.version` method. - """ - return _distro.info(pretty, best) - - -def os_release_info() -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information items - from the os-release file data source of the current OS distribution. - - See `os-release file`_ for details about these information items. - """ - return _distro.os_release_info() - - -def lsb_release_info() -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information items - from the lsb_release command data source of the current OS distribution. - - See `lsb_release command output`_ for details about these information - items. - """ - return _distro.lsb_release_info() - - -def distro_release_info() -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information items - from the distro release file data source of the current OS distribution. - - See `distro release file`_ for details about these information items. - """ - return _distro.distro_release_info() - - -def uname_info() -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information items - from the distro release file data source of the current OS distribution. - """ - return _distro.uname_info() - - -def os_release_attr(attribute: str) -> str: - """ - Return a single named information item from the os-release file data source - of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `os-release file`_ for details about these information items. - """ - return _distro.os_release_attr(attribute) - - -def lsb_release_attr(attribute: str) -> str: - """ - Return a single named information item from the lsb_release command output - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `lsb_release command output`_ for details about these information - items. - """ - return _distro.lsb_release_attr(attribute) - - -def distro_release_attr(attribute: str) -> str: - """ - Return a single named information item from the distro release file - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - - See `distro release file`_ for details about these information items. - """ - return _distro.distro_release_attr(attribute) - - -def uname_attr(attribute: str) -> str: - """ - Return a single named information item from the distro release file - data source of the current OS distribution. - - Parameters: - - * ``attribute`` (string): Key of the information item. - - Returns: - - * (string): Value of the information item, if the item exists. - The empty string, if the item does not exist. - """ - return _distro.uname_attr(attribute) - - -try: - from functools import cached_property -except ImportError: - # Python < 3.8 - class cached_property: # type: ignore - """A version of @property which caches the value. 
On access, it calls the - underlying function and sets the value in `__dict__` so future accesses - will not re-call the property. - """ - - def __init__(self, f: Callable[[Any], Any]) -> None: - self._fname = f.__name__ - self._f = f - - def __get__(self, obj: Any, owner: Type[Any]) -> Any: - assert obj is not None, f"call {self._fname} on an instance" - ret = obj.__dict__[self._fname] = self._f(obj) - return ret - - -class LinuxDistribution: - """ - Provides information about a OS distribution. - - This package creates a private module-global instance of this class with - default initialization arguments, that is used by the - `consolidated accessor functions`_ and `single source accessor functions`_. - By using default initialization arguments, that module-global instance - returns data about the current OS distribution (i.e. the distro this - package runs on). - - Normally, it is not necessary to create additional instances of this class. - However, in situations where control is needed over the exact data sources - that are used, instances of this class can be created with a specific - distro release file, or a specific os-release file, or without invoking the - lsb_release command. - """ - - def __init__( - self, - include_lsb: Optional[bool] = None, - os_release_file: str = "", - distro_release_file: str = "", - include_uname: Optional[bool] = None, - root_dir: Optional[str] = None, - include_oslevel: Optional[bool] = None, - ) -> None: - """ - The initialization method of this class gathers information from the - available data sources, and stores that in private instance attributes. - Subsequent access to the information items uses these private instance - attributes, so that the data sources are read only once. - - Parameters: - - * ``include_lsb`` (bool): Controls whether the - `lsb_release command output`_ is included as a data source. - - If the lsb_release command is not available in the program execution - path, the data source for the lsb_release command will be empty. - - * ``os_release_file`` (string): The path name of the - `os-release file`_ that is to be used as a data source. - - An empty string (the default) will cause the default path name to - be used (see `os-release file`_ for details). - - If the specified or defaulted os-release file does not exist, the - data source for the os-release file will be empty. - - * ``distro_release_file`` (string): The path name of the - `distro release file`_ that is to be used as a data source. - - An empty string (the default) will cause a default search algorithm - to be used (see `distro release file`_ for details). - - If the specified distro release file does not exist, or if no default - distro release file can be found, the data source for the distro - release file will be empty. - - * ``include_uname`` (bool): Controls whether uname command output is - included as a data source. If the uname command is not available in - the program execution path the data source for the uname command will - be empty. - - * ``root_dir`` (string): The absolute path to the root directory to use - to find distro-related information files. Note that ``include_*`` - parameters must not be enabled in combination with ``root_dir``. - - * ``include_oslevel`` (bool): Controls whether (AIX) oslevel command - output is included as a data source. If the oslevel command is not - available in the program execution path the data source will be - empty. 
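A brief, hedged usage sketch of the parameters documented above: pointing an instance at an alternate root (for example an inspected disk image mounted at a hypothetical `/mnt/target`). With `root_dir` set, the subprocess-based sources default to disabled and must not be re-enabled.

```python
from distro import LinuxDistribution

# root_dir redirects file lookups to /mnt/target/etc and /mnt/target/usr/lib.
# Combining root_dir with include_lsb/include_uname/include_oslevel raises
# ValueError, as the constructor below enforces.
dist = LinuxDistribution(root_dir="/mnt/target")
print(dist.id(), dist.version())
```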
- - Public instance attributes: - - * ``os_release_file`` (string): The path name of the - `os-release file`_ that is actually used as a data source. The - empty string if no distro release file is used as a data source. - - * ``distro_release_file`` (string): The path name of the - `distro release file`_ that is actually used as a data source. The - empty string if no distro release file is used as a data source. - - * ``include_lsb`` (bool): The result of the ``include_lsb`` parameter. - This controls whether the lsb information will be loaded. - - * ``include_uname`` (bool): The result of the ``include_uname`` - parameter. This controls whether the uname information will - be loaded. - - * ``include_oslevel`` (bool): The result of the ``include_oslevel`` - parameter. This controls whether (AIX) oslevel information will be - loaded. - - * ``root_dir`` (string): The result of the ``root_dir`` parameter. - The absolute path to the root directory to use to find distro-related - information files. - - Raises: - - * :py:exc:`ValueError`: Initialization parameters combination is not - supported. - - * :py:exc:`OSError`: Some I/O issue with an os-release file or distro - release file. - - * :py:exc:`UnicodeError`: A data source has unexpected characters or - uses an unexpected encoding. - """ - self.root_dir = root_dir - self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR - self.usr_lib_dir = ( - os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR - ) - - if os_release_file: - self.os_release_file = os_release_file - else: - etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME) - usr_lib_os_release_file = os.path.join( - self.usr_lib_dir, _OS_RELEASE_BASENAME - ) - - # NOTE: The idea is to respect order **and** have it set - # at all times for API backwards compatibility. 
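The condition that follows implements the order described in that NOTE. As a standalone sketch, assuming the default `/etc` and `/usr/lib` locations:

```python
import os

def pick_os_release(etc_dir="/etc", usr_lib_dir="/usr/lib"):
    # Prefer /etc/os-release; fall back to /usr/lib/os-release only when
    # the /etc file is missing and the /usr/lib one exists. The /etc path
    # is still recorded when neither file is present, so the attribute is
    # always set (API backwards compatibility).
    etc_path = os.path.join(etc_dir, "os-release")
    usr_lib_path = os.path.join(usr_lib_dir, "os-release")
    if os.path.isfile(etc_path) or not os.path.isfile(usr_lib_path):
        return etc_path
    return usr_lib_path
```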
- if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile( - usr_lib_os_release_file - ): - self.os_release_file = etc_dir_os_release_file - else: - self.os_release_file = usr_lib_os_release_file - - self.distro_release_file = distro_release_file or "" # updated later - - is_root_dir_defined = root_dir is not None - if is_root_dir_defined and (include_lsb or include_uname or include_oslevel): - raise ValueError( - "Including subprocess data sources from specific root_dir is disallowed" - " to prevent false information" - ) - self.include_lsb = ( - include_lsb if include_lsb is not None else not is_root_dir_defined - ) - self.include_uname = ( - include_uname if include_uname is not None else not is_root_dir_defined - ) - self.include_oslevel = ( - include_oslevel if include_oslevel is not None else not is_root_dir_defined - ) - - def __repr__(self) -> str: - """Return repr of all info""" - return ( - "LinuxDistribution(" - "os_release_file={self.os_release_file!r}, " - "distro_release_file={self.distro_release_file!r}, " - "include_lsb={self.include_lsb!r}, " - "include_uname={self.include_uname!r}, " - "include_oslevel={self.include_oslevel!r}, " - "root_dir={self.root_dir!r}, " - "_os_release_info={self._os_release_info!r}, " - "_lsb_release_info={self._lsb_release_info!r}, " - "_distro_release_info={self._distro_release_info!r}, " - "_uname_info={self._uname_info!r}, " - "_oslevel_info={self._oslevel_info!r})".format(self=self) - ) - - def linux_distribution( - self, full_distribution_name: bool = True - ) -> Tuple[str, str, str]: - """ - Return information about the OS distribution that is compatible - with Python's :func:`platform.linux_distribution`, supporting a subset - of its parameters. - - For details, see :func:`distro.linux_distribution`. - """ - return ( - self.name() if full_distribution_name else self.id(), - self.version(), - self._os_release_info.get("release_codename") or self.codename(), - ) - - def id(self) -> str: - """Return the distro ID of the OS distribution, as a string. - - For details, see :func:`distro.id`. - """ - - def normalize(distro_id: str, table: Dict[str, str]) -> str: - distro_id = distro_id.lower().replace(" ", "_") - return table.get(distro_id, distro_id) - - distro_id = self.os_release_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_OS_ID) - - distro_id = self.lsb_release_attr("distributor_id") - if distro_id: - return normalize(distro_id, NORMALIZED_LSB_ID) - - distro_id = self.distro_release_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_DISTRO_ID) - - distro_id = self.uname_attr("id") - if distro_id: - return normalize(distro_id, NORMALIZED_DISTRO_ID) - - return "" - - def name(self, pretty: bool = False) -> str: - """ - Return the name of the OS distribution, as a string. - - For details, see :func:`distro.name`. - """ - name = ( - self.os_release_attr("name") - or self.lsb_release_attr("distributor_id") - or self.distro_release_attr("name") - or self.uname_attr("name") - ) - if pretty: - name = self.os_release_attr("pretty_name") or self.lsb_release_attr( - "description" - ) - if not name: - name = self.distro_release_attr("name") or self.uname_attr("name") - version = self.version(pretty=True) - if version: - name = f"{name} {version}" - return name or "" - - def version(self, pretty: bool = False, best: bool = False) -> str: - """ - Return the version of the OS distribution, as a string. - - For details, see :func:`distro.version`. 
- """ - versions = [ - self.os_release_attr("version_id"), - self.lsb_release_attr("release"), - self.distro_release_attr("version_id"), - self._parse_distro_release_content(self.os_release_attr("pretty_name")).get( - "version_id", "" - ), - self._parse_distro_release_content( - self.lsb_release_attr("description") - ).get("version_id", ""), - self.uname_attr("release"), - ] - if self.uname_attr("id").startswith("aix"): - # On AIX platforms, prefer oslevel command output. - versions.insert(0, self.oslevel_info()) - elif self.id() == "debian" or "debian" in self.like().split(): - # On Debian-like, add debian_version file content to candidates list. - versions.append(self._debian_version) - version = "" - if best: - # This algorithm uses the last version in priority order that has - # the best precision. If the versions are not in conflict, that - # does not matter; otherwise, using the last one instead of the - # first one might be considered a surprise. - for v in versions: - if v.count(".") > version.count(".") or version == "": - version = v - else: - for v in versions: - if v != "": - version = v - break - if pretty and version and self.codename(): - version = f"{version} ({self.codename()})" - return version - - def version_parts(self, best: bool = False) -> Tuple[str, str, str]: - """ - Return the version of the OS distribution, as a tuple of version - numbers. - - For details, see :func:`distro.version_parts`. - """ - version_str = self.version(best=best) - if version_str: - version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?") - matches = version_regex.match(version_str) - if matches: - major, minor, build_number = matches.groups() - return major, minor or "", build_number or "" - return "", "", "" - - def major_version(self, best: bool = False) -> str: - """ - Return the major version number of the current distribution. - - For details, see :func:`distro.major_version`. - """ - return self.version_parts(best)[0] - - def minor_version(self, best: bool = False) -> str: - """ - Return the minor version number of the current distribution. - - For details, see :func:`distro.minor_version`. - """ - return self.version_parts(best)[1] - - def build_number(self, best: bool = False) -> str: - """ - Return the build number of the current distribution. - - For details, see :func:`distro.build_number`. - """ - return self.version_parts(best)[2] - - def like(self) -> str: - """ - Return the IDs of distributions that are like the OS distribution. - - For details, see :func:`distro.like`. - """ - return self.os_release_attr("id_like") or "" - - def codename(self) -> str: - """ - Return the codename of the OS distribution. - - For details, see :func:`distro.codename`. - """ - try: - # Handle os_release specially since distros might purposefully set - # this to empty string to have no codename - return self._os_release_info["codename"] - except KeyError: - return ( - self.lsb_release_attr("codename") - or self.distro_release_attr("codename") - or "" - ) - - def info(self, pretty: bool = False, best: bool = False) -> InfoDict: - """ - Return certain machine-readable information about the OS - distribution. - - For details, see :func:`distro.info`. 
- """ - return InfoDict( - id=self.id(), - version=self.version(pretty, best), - version_parts=VersionDict( - major=self.major_version(best), - minor=self.minor_version(best), - build_number=self.build_number(best), - ), - like=self.like(), - codename=self.codename(), - ) - - def os_release_info(self) -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information - items from the os-release file data source of the OS distribution. - - For details, see :func:`distro.os_release_info`. - """ - return self._os_release_info - - def lsb_release_info(self) -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information - items from the lsb_release command data source of the OS - distribution. - - For details, see :func:`distro.lsb_release_info`. - """ - return self._lsb_release_info - - def distro_release_info(self) -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information - items from the distro release file data source of the OS - distribution. - - For details, see :func:`distro.distro_release_info`. - """ - return self._distro_release_info - - def uname_info(self) -> Dict[str, str]: - """ - Return a dictionary containing key-value pairs for the information - items from the uname command data source of the OS distribution. - - For details, see :func:`distro.uname_info`. - """ - return self._uname_info - - def oslevel_info(self) -> str: - """ - Return AIX' oslevel command output. - """ - return self._oslevel_info - - def os_release_attr(self, attribute: str) -> str: - """ - Return a single named information item from the os-release file data - source of the OS distribution. - - For details, see :func:`distro.os_release_attr`. - """ - return self._os_release_info.get(attribute, "") - - def lsb_release_attr(self, attribute: str) -> str: - """ - Return a single named information item from the lsb_release command - output data source of the OS distribution. - - For details, see :func:`distro.lsb_release_attr`. - """ - return self._lsb_release_info.get(attribute, "") - - def distro_release_attr(self, attribute: str) -> str: - """ - Return a single named information item from the distro release file - data source of the OS distribution. - - For details, see :func:`distro.distro_release_attr`. - """ - return self._distro_release_info.get(attribute, "") - - def uname_attr(self, attribute: str) -> str: - """ - Return a single named information item from the uname command - output data source of the OS distribution. - - For details, see :func:`distro.uname_attr`. - """ - return self._uname_info.get(attribute, "") - - @cached_property - def _os_release_info(self) -> Dict[str, str]: - """ - Get the information items from the specified os-release file. - - Returns: - A dictionary containing all information items. - """ - if os.path.isfile(self.os_release_file): - with open(self.os_release_file, encoding="utf-8") as release_file: - return self._parse_os_release_content(release_file) - return {} - - @staticmethod - def _parse_os_release_content(lines: TextIO) -> Dict[str, str]: - """ - Parse the lines of an os-release file. - - Parameters: - - * lines: Iterable through the lines in the os-release file. - Each line must be a unicode string or a UTF-8 encoded byte - string. - - Returns: - A dictionary containing all information items. 
- """ - props = {} - lexer = shlex.shlex(lines, posix=True) - lexer.whitespace_split = True - - tokens = list(lexer) - for token in tokens: - # At this point, all shell-like parsing has been done (i.e. - # comments processed, quotes and backslash escape sequences - # processed, multi-line values assembled, trailing newlines - # stripped, etc.), so the tokens are now either: - # * variable assignments: var=value - # * commands or their arguments (not allowed in os-release) - # Ignore any tokens that are not variable assignments - if "=" in token: - k, v = token.split("=", 1) - props[k.lower()] = v - - if "version" in props: - # extract release codename (if any) from version attribute - match = re.search(r"\((\D+)\)|,\s*(\D+)", props["version"]) - if match: - release_codename = match.group(1) or match.group(2) - props["codename"] = props["release_codename"] = release_codename - - if "version_codename" in props: - # os-release added a version_codename field. Use that in - # preference to anything else Note that some distros purposefully - # do not have code names. They should be setting - # version_codename="" - props["codename"] = props["version_codename"] - elif "ubuntu_codename" in props: - # Same as above but a non-standard field name used on older Ubuntus - props["codename"] = props["ubuntu_codename"] - - return props - - @cached_property - def _lsb_release_info(self) -> Dict[str, str]: - """ - Get the information items from the lsb_release command output. - - Returns: - A dictionary containing all information items. - """ - if not self.include_lsb: - return {} - try: - cmd = ("lsb_release", "-a") - stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) - # Command not found or lsb_release returned error - except (OSError, subprocess.CalledProcessError): - return {} - content = self._to_str(stdout).splitlines() - return self._parse_lsb_release_content(content) - - @staticmethod - def _parse_lsb_release_content(lines: Iterable[str]) -> Dict[str, str]: - """ - Parse the output of the lsb_release command. - - Parameters: - - * lines: Iterable through the lines of the lsb_release output. - Each line must be a unicode string or a UTF-8 encoded byte - string. - - Returns: - A dictionary containing all information items. - """ - props = {} - for line in lines: - kv = line.strip("\n").split(":", 1) - if len(kv) != 2: - # Ignore lines without colon. 
- continue - k, v = kv - props.update({k.replace(" ", "_").lower(): v.strip()}) - return props - - @cached_property - def _uname_info(self) -> Dict[str, str]: - if not self.include_uname: - return {} - try: - cmd = ("uname", "-rs") - stdout = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) - except OSError: - return {} - content = self._to_str(stdout).splitlines() - return self._parse_uname_content(content) - - @cached_property - def _oslevel_info(self) -> str: - if not self.include_oslevel: - return "" - try: - stdout = subprocess.check_output("oslevel", stderr=subprocess.DEVNULL) - except (OSError, subprocess.CalledProcessError): - return "" - return self._to_str(stdout).strip() - - @cached_property - def _debian_version(self) -> str: - try: - with open( - os.path.join(self.etc_dir, "debian_version"), encoding="ascii" - ) as fp: - return fp.readline().rstrip() - except FileNotFoundError: - return "" - - @staticmethod - def _parse_uname_content(lines: Sequence[str]) -> Dict[str, str]: - if not lines: - return {} - props = {} - match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip()) - if match: - name, version = match.groups() - - # This is to prevent the Linux kernel version from - # appearing as the 'best' version on otherwise - # identifiable distributions. - if name == "Linux": - return {} - props["id"] = name.lower() - props["name"] = name - props["release"] = version - return props - - @staticmethod - def _to_str(bytestring: bytes) -> str: - encoding = sys.getfilesystemencoding() - return bytestring.decode(encoding) - - @cached_property - def _distro_release_info(self) -> Dict[str, str]: - """ - Get the information items from the specified distro release file. - - Returns: - A dictionary containing all information items. - """ - if self.distro_release_file: - # If it was specified, we use it and parse what we can, even if - # its file name or content does not match the expected pattern. - distro_info = self._parse_distro_release_file(self.distro_release_file) - basename = os.path.basename(self.distro_release_file) - # The file name pattern for user-specified distro release files - # is somewhat more tolerant (compared to when searching for the - # file), because we want to use what was specified as best as - # possible. - match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - else: - try: - basenames = [ - basename - for basename in os.listdir(self.etc_dir) - if basename not in _DISTRO_RELEASE_IGNORE_BASENAMES - and os.path.isfile(os.path.join(self.etc_dir, basename)) - ] - # We sort for repeatability in cases where there are multiple - # distro specific files; e.g. CentOS, Oracle, Enterprise all - # containing `redhat-release` on top of their own. - basenames.sort() - except OSError: - # This may occur when /etc is not readable but we can't be - # sure about the *-release files. Check common entries of - # /etc for information. If they turn out to not be there the - # error is handled in `_parse_distro_release_file()`. - basenames = _DISTRO_RELEASE_BASENAMES - for basename in basenames: - match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename) - if match is None: - continue - filepath = os.path.join(self.etc_dir, basename) - distro_info = self._parse_distro_release_file(filepath) - # The name is always present if the pattern matches. - if "name" not in distro_info: - continue - self.distro_release_file = filepath - break - else: # the loop didn't "break": no candidate. 
- return {} - - if match is not None: - distro_info["id"] = match.group(1) - - # CloudLinux < 7: manually enrich info with proper id. - if "cloudlinux" in distro_info.get("name", "").lower(): - distro_info["id"] = "cloudlinux" - - return distro_info - - def _parse_distro_release_file(self, filepath: str) -> Dict[str, str]: - """ - Parse a distro release file. - - Parameters: - - * filepath: Path name of the distro release file. - - Returns: - A dictionary containing all information items. - """ - try: - with open(filepath, encoding="utf-8") as fp: - # Only parse the first line. For instance, on SLES there - # are multiple lines. We don't want them... - return self._parse_distro_release_content(fp.readline()) - except OSError: - # Ignore not being able to read a specific, seemingly version - # related file. - # See https://github.com/python-distro/distro/issues/162 - return {} - - @staticmethod - def _parse_distro_release_content(line: str) -> Dict[str, str]: - """ - Parse a line from a distro release file. - - Parameters: - * line: Line from the distro release file. Must be a unicode string - or a UTF-8 encoded byte string. - - Returns: - A dictionary containing all information items. - """ - matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1]) - distro_info = {} - if matches: - # regexp ensures non-None - distro_info["name"] = matches.group(3)[::-1] - if matches.group(2): - distro_info["version_id"] = matches.group(2)[::-1] - if matches.group(1): - distro_info["codename"] = matches.group(1)[::-1] - elif line: - distro_info["name"] = line.strip() - return distro_info - - -_distro = LinuxDistribution() - - -def main() -> None: - logger = logging.getLogger(__name__) - logger.setLevel(logging.DEBUG) - logger.addHandler(logging.StreamHandler(sys.stdout)) - - parser = argparse.ArgumentParser(description="OS distro info tool") - parser.add_argument( - "--json", "-j", help="Output in machine readable format", action="store_true" - ) - - parser.add_argument( - "--root-dir", - "-r", - type=str, - dest="root_dir", - help="Path to the root filesystem directory (defaults to /)", - ) - - args = parser.parse_args() - - if args.root_dir: - dist = LinuxDistribution( - include_lsb=False, - include_uname=False, - include_oslevel=False, - root_dir=args.root_dir, - ) - else: - dist = _distro - - if args.json: - logger.info(json.dumps(dist.info(), indent=4, sort_keys=True)) - else: - logger.info("Name: %s", dist.name(pretty=True)) - distribution_version = dist.version(pretty=True) - logger.info("Version: %s", distribution_version) - distribution_codename = dist.codename() - logger.info("Codename: %s", distribution_codename) - - -if __name__ == "__main__": - main() diff --git a/myenv/Lib/site-packages/distro/py.typed b/myenv/Lib/site-packages/distro/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/dotenv/__init__.py b/myenv/Lib/site-packages/dotenv/__init__.py deleted file mode 100644 index 7f4c631..0000000 --- a/myenv/Lib/site-packages/dotenv/__init__.py +++ /dev/null @@ -1,49 +0,0 @@ -from typing import Any, Optional - -from .main import (dotenv_values, find_dotenv, get_key, load_dotenv, set_key, - unset_key) - - -def load_ipython_extension(ipython: Any) -> None: - from .ipython import load_ipython_extension - load_ipython_extension(ipython) - - -def get_cli_string( - path: Optional[str] = None, - action: Optional[str] = None, - key: Optional[str] = None, - value: Optional[str] = None, - quote: Optional[str] = None, -): - """Returns a string 
suitable for running as a shell script.
-
-    Useful for converting arguments passed to a fabric task so they can
-    be passed to a `local` or `run` command.
-    """
-    command = ['dotenv']
-    if quote:
-        command.append(f'-q {quote}')
-    if path:
-        command.append(f'-f {path}')
-    if action:
-        command.append(action)
-    if key:
-        command.append(key)
-    if value:
-        if ' ' in value:
-            command.append(f'"{value}"')
-        else:
-            command.append(value)
-
-    return ' '.join(command).strip()
-
-
-__all__ = ['get_cli_string',
-           'load_dotenv',
-           'dotenv_values',
-           'get_key',
-           'set_key',
-           'unset_key',
-           'find_dotenv',
-           'load_ipython_extension']
diff --git a/myenv/Lib/site-packages/dotenv/__main__.py b/myenv/Lib/site-packages/dotenv/__main__.py
deleted file mode 100644
index 3977f55..0000000
--- a/myenv/Lib/site-packages/dotenv/__main__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-"""Entry point for cli, enables execution with `python -m dotenv`"""
-
-from .cli import cli
-
-if __name__ == "__main__":
-    cli()
diff --git a/myenv/Lib/site-packages/dotenv/cli.py b/myenv/Lib/site-packages/dotenv/cli.py
deleted file mode 100644
index 65ead46..0000000
--- a/myenv/Lib/site-packages/dotenv/cli.py
+++ /dev/null
@@ -1,199 +0,0 @@
-import json
-import os
-import shlex
-import sys
-from contextlib import contextmanager
-from subprocess import Popen
-from typing import Any, Dict, IO, Iterator, List
-
-try:
-    import click
-except ImportError:
-    sys.stderr.write('It seems python-dotenv is not installed with cli option. \n'
-                     'Run pip install "python-dotenv[cli]" to fix this.')
-    sys.exit(1)
-
-from .main import dotenv_values, set_key, unset_key
-from .version import __version__
-
-
-def enumerate_env():
-    """
-    Return a path for the ${pwd}/.env file.
-
-    If pwd does not exist, return None.
-    """
-    try:
-        cwd = os.getcwd()
-    except FileNotFoundError:
-        return None
-    path = os.path.join(cwd, '.env')
-    return path
-
-
-@click.group()
-@click.option('-f', '--file', default=enumerate_env(),
-              type=click.Path(file_okay=True),
-              help="Location of the .env file, defaults to .env file in current working directory.")
-@click.option('-q', '--quote', default='always',
-              type=click.Choice(['always', 'never', 'auto']),
-              help="Whether or not to quote the variable values. Default mode is always. This does not affect parsing.")
-@click.option('-e', '--export', default=False,
-              type=click.BOOL,
-              help="Whether to write the dot file as an executable bash script.")
-@click.version_option(version=__version__)
-@click.pass_context
-def cli(ctx: click.Context, file: Any, quote: Any, export: Any) -> None:
-    """This script is used to set, get or unset values from a .env file."""
-    ctx.obj = {'QUOTE': quote, 'EXPORT': export, 'FILE': file}
-
-
-@contextmanager
-def stream_file(path: os.PathLike) -> Iterator[IO[str]]:
-    """
-    Open a file and yield the corresponding (decoded) stream.
-
-    Exits with error code 2 if the file cannot be opened.
-    """
-
-    try:
-        with open(path) as stream:
-            yield stream
-    except OSError as exc:
-        print(f"Error opening env file: {exc}", file=sys.stderr)
-        exit(2)
-
-
-@cli.command()
-@click.pass_context
-@click.option('--format', default='simple',
-              type=click.Choice(['simple', 'json', 'shell', 'export']),
-              help="The format in which to display the list. 
Default format is simple, " - "which displays name=value without quotes.") -def list(ctx: click.Context, format: bool) -> None: - """Display all the stored key/value.""" - file = ctx.obj['FILE'] - - with stream_file(file) as stream: - values = dotenv_values(stream=stream) - - if format == 'json': - click.echo(json.dumps(values, indent=2, sort_keys=True)) - else: - prefix = 'export ' if format == 'export' else '' - for k in sorted(values): - v = values[k] - if v is not None: - if format in ('export', 'shell'): - v = shlex.quote(v) - click.echo(f'{prefix}{k}={v}') - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -@click.argument('value', required=True) -def set(ctx: click.Context, key: Any, value: Any) -> None: - """Store the given key/value.""" - file = ctx.obj['FILE'] - quote = ctx.obj['QUOTE'] - export = ctx.obj['EXPORT'] - success, key, value = set_key(file, key, value, quote, export) - if success: - click.echo(f'{key}={value}') - else: - exit(1) - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -def get(ctx: click.Context, key: Any) -> None: - """Retrieve the value for the given key.""" - file = ctx.obj['FILE'] - - with stream_file(file) as stream: - values = dotenv_values(stream=stream) - - stored_value = values.get(key) - if stored_value: - click.echo(stored_value) - else: - exit(1) - - -@cli.command() -@click.pass_context -@click.argument('key', required=True) -def unset(ctx: click.Context, key: Any) -> None: - """Removes the given key.""" - file = ctx.obj['FILE'] - quote = ctx.obj['QUOTE'] - success, key = unset_key(file, key, quote) - if success: - click.echo(f"Successfully removed {key}") - else: - exit(1) - - -@cli.command(context_settings={'ignore_unknown_options': True}) -@click.pass_context -@click.option( - "--override/--no-override", - default=True, - help="Override variables from the environment file with those from the .env file.", -) -@click.argument('commandline', nargs=-1, type=click.UNPROCESSED) -def run(ctx: click.Context, override: bool, commandline: List[str]) -> None: - """Run command with environment variables present.""" - file = ctx.obj['FILE'] - if not os.path.isfile(file): - raise click.BadParameter( - f'Invalid value for \'-f\' "{file}" does not exist.', - ctx=ctx - ) - dotenv_as_dict = { - k: v - for (k, v) in dotenv_values(file).items() - if v is not None and (override or k not in os.environ) - } - - if not commandline: - click.echo('No command given.') - exit(1) - ret = run_command(commandline, dotenv_as_dict) - exit(ret) - - -def run_command(command: List[str], env: Dict[str, str]) -> int: - """Run command in sub process. - - Runs the command in a sub process with the variables from `env` - added in the current environment variables. 
-
-    Parameters
-    ----------
-    command: List[str]
-        The command and its parameters
-    env: Dict
-        The additional environment variables
-
-    Returns
-    -------
-    int
-        The return code of the command
-
-    """
-    # copy the current environment variables and add the values from
-    # `env`
-    cmd_env = os.environ.copy()
-    cmd_env.update(env)
-
-    p = Popen(command,
-              universal_newlines=True,
-              bufsize=0,
-              shell=False,
-              env=cmd_env)
-    _, _ = p.communicate()
-
-    return p.returncode
diff --git a/myenv/Lib/site-packages/dotenv/ipython.py b/myenv/Lib/site-packages/dotenv/ipython.py
deleted file mode 100644
index 7df727c..0000000
--- a/myenv/Lib/site-packages/dotenv/ipython.py
+++ /dev/null
@@ -1,39 +0,0 @@
-from IPython.core.magic import Magics, line_magic, magics_class  # type: ignore
-from IPython.core.magic_arguments import (argument, magic_arguments,  # type: ignore
-                                          parse_argstring)  # type: ignore
-
-from .main import find_dotenv, load_dotenv
-
-
-@magics_class
-class IPythonDotEnv(Magics):
-
-    @magic_arguments()
-    @argument(
-        '-o', '--override', action='store_true',
-        help="Indicate to override existing variables"
-    )
-    @argument(
-        '-v', '--verbose', action='store_true',
-        help="Indicate function calls to be verbose"
-    )
-    @argument('dotenv_path', nargs='?', type=str, default='.env',
-              help='Search in increasingly higher folders for the `dotenv_path`')
-    @line_magic
-    def dotenv(self, line):
-        args = parse_argstring(self.dotenv, line)
-        # Locate the .env file
-        dotenv_path = args.dotenv_path
-        try:
-            dotenv_path = find_dotenv(dotenv_path, True, True)
-        except IOError:
-            print("cannot find .env file")
-            return
-
-        # Load the .env file
-        load_dotenv(dotenv_path, verbose=args.verbose, override=args.override)
-
-
-def load_ipython_extension(ipython):
-    """Register the %dotenv magic."""
-    ipython.register_magics(IPythonDotEnv)
diff --git a/myenv/Lib/site-packages/dotenv/main.py b/myenv/Lib/site-packages/dotenv/main.py
deleted file mode 100644
index 7bc5428..0000000
--- a/myenv/Lib/site-packages/dotenv/main.py
+++ /dev/null
@@ -1,392 +0,0 @@
-import io
-import logging
-import os
-import pathlib
-import shutil
-import sys
-import tempfile
-from collections import OrderedDict
-from contextlib import contextmanager
-from typing import (IO, Dict, Iterable, Iterator, Mapping, Optional, Tuple,
-                    Union)
-
-from .parser import Binding, parse_stream
-from .variables import parse_variables
-
-# A type alias for a string path to be used for the paths in this file.
-# These paths may flow to `open()` and `shutil.move()`; `shutil.move()`
-# only accepts string paths, not byte paths or file descriptors. See
-# https://github.com/python/typeshed/pull/6832.
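As orientation for the dotenv/main.py module being removed here, a minimal usage sketch of its public API (it assumes python-dotenv is installed; the .env content below is hypothetical):

```python
from io import StringIO

from dotenv import dotenv_values, load_dotenv

# Parse bindings from a stream without touching os.environ;
# a key with no value ("EMPTY") maps to None.
values = dotenv_values(stream=StringIO("HOST=db\nPORT=5432\nEMPTY\n"))
assert values == {"HOST": "db", "PORT": "5432", "EMPTY": None}

# Load a .env file (located via find_dotenv() when no path is given)
# into os.environ, keeping any variables that are already set.
load_dotenv(override=False)
```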
-StrPath = Union[str, 'os.PathLike[str]'] - -logger = logging.getLogger(__name__) - - -def with_warn_for_invalid_lines(mappings: Iterator[Binding]) -> Iterator[Binding]: - for mapping in mappings: - if mapping.error: - logger.warning( - "Python-dotenv could not parse statement starting at line %s", - mapping.original.line, - ) - yield mapping - - -class DotEnv: - def __init__( - self, - dotenv_path: Optional[StrPath], - stream: Optional[IO[str]] = None, - verbose: bool = False, - encoding: Optional[str] = None, - interpolate: bool = True, - override: bool = True, - ) -> None: - self.dotenv_path: Optional[StrPath] = dotenv_path - self.stream: Optional[IO[str]] = stream - self._dict: Optional[Dict[str, Optional[str]]] = None - self.verbose: bool = verbose - self.encoding: Optional[str] = encoding - self.interpolate: bool = interpolate - self.override: bool = override - - @contextmanager - def _get_stream(self) -> Iterator[IO[str]]: - if self.dotenv_path and os.path.isfile(self.dotenv_path): - with open(self.dotenv_path, encoding=self.encoding) as stream: - yield stream - elif self.stream is not None: - yield self.stream - else: - if self.verbose: - logger.info( - "Python-dotenv could not find configuration file %s.", - self.dotenv_path or '.env', - ) - yield io.StringIO('') - - def dict(self) -> Dict[str, Optional[str]]: - """Return dotenv as dict""" - if self._dict: - return self._dict - - raw_values = self.parse() - - if self.interpolate: - self._dict = OrderedDict(resolve_variables(raw_values, override=self.override)) - else: - self._dict = OrderedDict(raw_values) - - return self._dict - - def parse(self) -> Iterator[Tuple[str, Optional[str]]]: - with self._get_stream() as stream: - for mapping in with_warn_for_invalid_lines(parse_stream(stream)): - if mapping.key is not None: - yield mapping.key, mapping.value - - def set_as_environment_variables(self) -> bool: - """ - Load the current dotenv as system environment variable. - """ - if not self.dict(): - return False - - for k, v in self.dict().items(): - if k in os.environ and not self.override: - continue - if v is not None: - os.environ[k] = v - - return True - - def get(self, key: str) -> Optional[str]: - """ - """ - data = self.dict() - - if key in data: - return data[key] - - if self.verbose: - logger.warning("Key %s not found in %s.", key, self.dotenv_path) - - return None - - -def get_key( - dotenv_path: StrPath, - key_to_get: str, - encoding: Optional[str] = "utf-8", -) -> Optional[str]: - """ - Get the value of a given key from the given .env. - - Returns `None` if the key isn't found or doesn't have a value. 
- """ - return DotEnv(dotenv_path, verbose=True, encoding=encoding).get(key_to_get) - - -@contextmanager -def rewrite( - path: StrPath, - encoding: Optional[str], -) -> Iterator[Tuple[IO[str], IO[str]]]: - pathlib.Path(path).touch() - - with tempfile.NamedTemporaryFile(mode="w", encoding=encoding, delete=False) as dest: - error = None - try: - with open(path, encoding=encoding) as source: - yield (source, dest) - except BaseException as err: - error = err - - if error is None: - shutil.move(dest.name, path) - else: - os.unlink(dest.name) - raise error from None - - -def set_key( - dotenv_path: StrPath, - key_to_set: str, - value_to_set: str, - quote_mode: str = "always", - export: bool = False, - encoding: Optional[str] = "utf-8", -) -> Tuple[Optional[bool], str, str]: - """ - Adds or Updates a key/value to the given .env - - If the .env path given doesn't exist, fails instead of risking creating - an orphan .env somewhere in the filesystem - """ - if quote_mode not in ("always", "auto", "never"): - raise ValueError(f"Unknown quote_mode: {quote_mode}") - - quote = ( - quote_mode == "always" - or (quote_mode == "auto" and not value_to_set.isalnum()) - ) - - if quote: - value_out = "'{}'".format(value_to_set.replace("'", "\\'")) - else: - value_out = value_to_set - if export: - line_out = f'export {key_to_set}={value_out}\n' - else: - line_out = f"{key_to_set}={value_out}\n" - - with rewrite(dotenv_path, encoding=encoding) as (source, dest): - replaced = False - missing_newline = False - for mapping in with_warn_for_invalid_lines(parse_stream(source)): - if mapping.key == key_to_set: - dest.write(line_out) - replaced = True - else: - dest.write(mapping.original.string) - missing_newline = not mapping.original.string.endswith("\n") - if not replaced: - if missing_newline: - dest.write("\n") - dest.write(line_out) - - return True, key_to_set, value_to_set - - -def unset_key( - dotenv_path: StrPath, - key_to_unset: str, - quote_mode: str = "always", - encoding: Optional[str] = "utf-8", -) -> Tuple[Optional[bool], str]: - """ - Removes a given key from the given `.env` file. - - If the .env path given doesn't exist, fails. - If the given key doesn't exist in the .env, fails. 
-    """
-    if not os.path.exists(dotenv_path):
-        logger.warning("Can't delete from %s - it doesn't exist.", dotenv_path)
-        return None, key_to_unset
-
-    removed = False
-    with rewrite(dotenv_path, encoding=encoding) as (source, dest):
-        for mapping in with_warn_for_invalid_lines(parse_stream(source)):
-            if mapping.key == key_to_unset:
-                removed = True
-            else:
-                dest.write(mapping.original.string)
-
-    if not removed:
-        logger.warning("Key %s not removed from %s - key doesn't exist.", key_to_unset, dotenv_path)
-        return None, key_to_unset
-
-    return removed, key_to_unset
-
-
-def resolve_variables(
-    values: Iterable[Tuple[str, Optional[str]]],
-    override: bool,
-) -> Mapping[str, Optional[str]]:
-    new_values: Dict[str, Optional[str]] = {}
-
-    for (name, value) in values:
-        if value is None:
-            result = None
-        else:
-            atoms = parse_variables(value)
-            env: Dict[str, Optional[str]] = {}
-            if override:
-                env.update(os.environ)  # type: ignore
-                env.update(new_values)
-            else:
-                env.update(new_values)
-                env.update(os.environ)  # type: ignore
-            result = "".join(atom.resolve(env) for atom in atoms)
-
-        new_values[name] = result
-
-    return new_values
-
-
-def _walk_to_root(path: str) -> Iterator[str]:
-    """
-    Yield directories starting from the given directory up to the root
-    """
-    if not os.path.exists(path):
-        raise IOError('Starting path not found')
-
-    if os.path.isfile(path):
-        path = os.path.dirname(path)
-
-    last_dir = None
-    current_dir = os.path.abspath(path)
-    while last_dir != current_dir:
-        yield current_dir
-        parent_dir = os.path.abspath(os.path.join(current_dir, os.path.pardir))
-        last_dir, current_dir = current_dir, parent_dir
-
-
-def find_dotenv(
-    filename: str = '.env',
-    raise_error_if_not_found: bool = False,
-    usecwd: bool = False,
-) -> str:
-    """
-    Search in increasingly higher folders for the given file
-
-    Returns path to the file if found, or an empty string otherwise
-    """
-
-    def _is_interactive():
-        """ Decide whether this is running in a REPL or IPython notebook """
-        try:
-            main = __import__('__main__', None, None, fromlist=['__file__'])
-        except ModuleNotFoundError:
-            return False
-        return not hasattr(main, '__file__')
-
-    if usecwd or _is_interactive() or getattr(sys, 'frozen', False):
-        # Should work without __file__, e.g. in REPL or IPython notebook.
-        path = os.getcwd()
-    else:
-        # will work for .py files
-        frame = sys._getframe()
-        current_file = __file__
-
-        while frame.f_code.co_filename == current_file or not os.path.exists(
-            frame.f_code.co_filename
-        ):
-            assert frame.f_back is not None
-            frame = frame.f_back
-        frame_filename = frame.f_code.co_filename
-        path = os.path.dirname(os.path.abspath(frame_filename))
-
-    for dirname in _walk_to_root(path):
-        check_path = os.path.join(dirname, filename)
-        if os.path.isfile(check_path):
-            return check_path
-
-    if raise_error_if_not_found:
-        raise IOError('File not found')
-
-    return ''
-
-
-def load_dotenv(
-    dotenv_path: Optional[StrPath] = None,
-    stream: Optional[IO[str]] = None,
-    verbose: bool = False,
-    override: bool = False,
-    interpolate: bool = True,
-    encoding: Optional[str] = "utf-8",
-) -> bool:
-    """Parse a .env file and then load all the variables found as environment variables.
-
-    Parameters:
-        dotenv_path: Absolute or relative path to .env file.
-        stream: Text stream (such as `io.StringIO`) with .env content, used if
-            `dotenv_path` is `None`.
-        verbose: Whether to output a warning if the .env file is missing.
- override: Whether to override the system environment variables with the variables - from the `.env` file. - encoding: Encoding to be used to read the file. - Returns: - Bool: True if at least one environment variable is set else False - - If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the - .env file. - """ - if dotenv_path is None and stream is None: - dotenv_path = find_dotenv() - - dotenv = DotEnv( - dotenv_path=dotenv_path, - stream=stream, - verbose=verbose, - interpolate=interpolate, - override=override, - encoding=encoding, - ) - return dotenv.set_as_environment_variables() - - -def dotenv_values( - dotenv_path: Optional[StrPath] = None, - stream: Optional[IO[str]] = None, - verbose: bool = False, - interpolate: bool = True, - encoding: Optional[str] = "utf-8", -) -> Dict[str, Optional[str]]: - """ - Parse a .env file and return its content as a dict. - - The returned dict will have `None` values for keys without values in the .env file. - For example, `foo=bar` results in `{"foo": "bar"}` whereas `foo` alone results in - `{"foo": None}` - - Parameters: - dotenv_path: Absolute or relative path to the .env file. - stream: `StringIO` object with .env content, used if `dotenv_path` is `None`. - verbose: Whether to output a warning if the .env file is missing. - encoding: Encoding to be used to read the file. - - If both `dotenv_path` and `stream` are `None`, `find_dotenv()` is used to find the - .env file. - """ - if dotenv_path is None and stream is None: - dotenv_path = find_dotenv() - - return DotEnv( - dotenv_path=dotenv_path, - stream=stream, - verbose=verbose, - interpolate=interpolate, - override=True, - encoding=encoding, - ).dict() diff --git a/myenv/Lib/site-packages/dotenv/parser.py b/myenv/Lib/site-packages/dotenv/parser.py deleted file mode 100644 index 735f14a..0000000 --- a/myenv/Lib/site-packages/dotenv/parser.py +++ /dev/null @@ -1,175 +0,0 @@ -import codecs -import re -from typing import (IO, Iterator, Match, NamedTuple, Optional, # noqa:F401 - Pattern, Sequence, Tuple) - - -def make_regex(string: str, extra_flags: int = 0) -> Pattern[str]: - return re.compile(string, re.UNICODE | extra_flags) - - -_newline = make_regex(r"(\r\n|\n|\r)") -_multiline_whitespace = make_regex(r"\s*", extra_flags=re.MULTILINE) -_whitespace = make_regex(r"[^\S\r\n]*") -_export = make_regex(r"(?:export[^\S\r\n]+)?") -_single_quoted_key = make_regex(r"'([^']+)'") -_unquoted_key = make_regex(r"([^=\#\s]+)") -_equal_sign = make_regex(r"(=[^\S\r\n]*)") -_single_quoted_value = make_regex(r"'((?:\\'|[^'])*)'") -_double_quoted_value = make_regex(r'"((?:\\"|[^"])*)"') -_unquoted_value = make_regex(r"([^\r\n]*)") -_comment = make_regex(r"(?:[^\S\r\n]*#[^\r\n]*)?") -_end_of_line = make_regex(r"[^\S\r\n]*(?:\r\n|\n|\r|$)") -_rest_of_line = make_regex(r"[^\r\n]*(?:\r|\n|\r\n)?") -_double_quote_escapes = make_regex(r"\\[\\'\"abfnrtv]") -_single_quote_escapes = make_regex(r"\\[\\']") - - -class Original(NamedTuple): - string: str - line: int - - -class Binding(NamedTuple): - key: Optional[str] - value: Optional[str] - original: Original - error: bool - - -class Position: - def __init__(self, chars: int, line: int) -> None: - self.chars = chars - self.line = line - - @classmethod - def start(cls) -> "Position": - return cls(chars=0, line=1) - - def set(self, other: "Position") -> None: - self.chars = other.chars - self.line = other.line - - def advance(self, string: str) -> None: - self.chars += len(string) - self.line += len(re.findall(_newline, string)) - - -class 
Error(Exception): - pass - - -class Reader: - def __init__(self, stream: IO[str]) -> None: - self.string = stream.read() - self.position = Position.start() - self.mark = Position.start() - - def has_next(self) -> bool: - return self.position.chars < len(self.string) - - def set_mark(self) -> None: - self.mark.set(self.position) - - def get_marked(self) -> Original: - return Original( - string=self.string[self.mark.chars:self.position.chars], - line=self.mark.line, - ) - - def peek(self, count: int) -> str: - return self.string[self.position.chars:self.position.chars + count] - - def read(self, count: int) -> str: - result = self.string[self.position.chars:self.position.chars + count] - if len(result) < count: - raise Error("read: End of string") - self.position.advance(result) - return result - - def read_regex(self, regex: Pattern[str]) -> Sequence[str]: - match = regex.match(self.string, self.position.chars) - if match is None: - raise Error("read_regex: Pattern not found") - self.position.advance(self.string[match.start():match.end()]) - return match.groups() - - -def decode_escapes(regex: Pattern[str], string: str) -> str: - def decode_match(match: Match[str]) -> str: - return codecs.decode(match.group(0), 'unicode-escape') # type: ignore - - return regex.sub(decode_match, string) - - -def parse_key(reader: Reader) -> Optional[str]: - char = reader.peek(1) - if char == "#": - return None - elif char == "'": - (key,) = reader.read_regex(_single_quoted_key) - else: - (key,) = reader.read_regex(_unquoted_key) - return key - - -def parse_unquoted_value(reader: Reader) -> str: - (part,) = reader.read_regex(_unquoted_value) - return re.sub(r"\s+#.*", "", part).rstrip() - - -def parse_value(reader: Reader) -> str: - char = reader.peek(1) - if char == u"'": - (value,) = reader.read_regex(_single_quoted_value) - return decode_escapes(_single_quote_escapes, value) - elif char == u'"': - (value,) = reader.read_regex(_double_quoted_value) - return decode_escapes(_double_quote_escapes, value) - elif char in (u"", u"\n", u"\r"): - return u"" - else: - return parse_unquoted_value(reader) - - -def parse_binding(reader: Reader) -> Binding: - reader.set_mark() - try: - reader.read_regex(_multiline_whitespace) - if not reader.has_next(): - return Binding( - key=None, - value=None, - original=reader.get_marked(), - error=False, - ) - reader.read_regex(_export) - key = parse_key(reader) - reader.read_regex(_whitespace) - if reader.peek(1) == "=": - reader.read_regex(_equal_sign) - value: Optional[str] = parse_value(reader) - else: - value = None - reader.read_regex(_comment) - reader.read_regex(_end_of_line) - return Binding( - key=key, - value=value, - original=reader.get_marked(), - error=False, - ) - except Error: - reader.read_regex(_rest_of_line) - return Binding( - key=None, - value=None, - original=reader.get_marked(), - error=True, - ) - - -def parse_stream(stream: IO[str]) -> Iterator[Binding]: - reader = Reader(stream) - while reader.has_next(): - yield parse_binding(reader) diff --git a/myenv/Lib/site-packages/dotenv/py.typed b/myenv/Lib/site-packages/dotenv/py.typed deleted file mode 100644 index 7632ecf..0000000 --- a/myenv/Lib/site-packages/dotenv/py.typed +++ /dev/null @@ -1 +0,0 @@ -# Marker file for PEP 561 diff --git a/myenv/Lib/site-packages/dotenv/variables.py b/myenv/Lib/site-packages/dotenv/variables.py deleted file mode 100644 index 667f2f2..0000000 --- a/myenv/Lib/site-packages/dotenv/variables.py +++ /dev/null @@ -1,86 +0,0 @@ -import re -from abc import ABCMeta, abstractmethod 
-from typing import Iterator, Mapping, Optional, Pattern
-
-_posix_variable: Pattern[str] = re.compile(
-    r"""
-    \$\{
-        (?P<name>[^\}:]*)
-        (?::-
-            (?P<default>[^\}]*)
-        )?
-    \}
-    """,
-    re.VERBOSE,
-)
-
-
-class Atom(metaclass=ABCMeta):
-    def __ne__(self, other: object) -> bool:
-        result = self.__eq__(other)
-        if result is NotImplemented:
-            return NotImplemented
-        return not result
-
-    @abstractmethod
-    def resolve(self, env: Mapping[str, Optional[str]]) -> str: ...
-
-
-class Literal(Atom):
-    def __init__(self, value: str) -> None:
-        self.value = value
-
-    def __repr__(self) -> str:
-        return f"Literal(value={self.value})"
-
-    def __eq__(self, other: object) -> bool:
-        if not isinstance(other, self.__class__):
-            return NotImplemented
-        return self.value == other.value
-
-    def __hash__(self) -> int:
-        return hash((self.__class__, self.value))
-
-    def resolve(self, env: Mapping[str, Optional[str]]) -> str:
-        return self.value
-
-
-class Variable(Atom):
-    def __init__(self, name: str, default: Optional[str]) -> None:
-        self.name = name
-        self.default = default
-
-    def __repr__(self) -> str:
-        return f"Variable(name={self.name}, default={self.default})"
-
-    def __eq__(self, other: object) -> bool:
-        if not isinstance(other, self.__class__):
-            return NotImplemented
-        return (self.name, self.default) == (other.name, other.default)
-
-    def __hash__(self) -> int:
-        return hash((self.__class__, self.name, self.default))
-
-    def resolve(self, env: Mapping[str, Optional[str]]) -> str:
-        default = self.default if self.default is not None else ""
-        result = env.get(self.name, default)
-        return result if result is not None else ""
-
-
-def parse_variables(value: str) -> Iterator[Atom]:
-    cursor = 0
-
-    for match in _posix_variable.finditer(value):
-        (start, end) = match.span()
-        name = match["name"]
-        default = match["default"]
-
-        if start > cursor:
-            yield Literal(value=value[cursor:start])
-
-        yield Variable(name=name, default=default)
-        cursor = end
-
-    length = len(value)
-    if cursor < length:
-        yield Literal(value=value[cursor:length])
diff --git a/myenv/Lib/site-packages/dotenv/version.py b/myenv/Lib/site-packages/dotenv/version.py
deleted file mode 100644
index 5c4105c..0000000
--- a/myenv/Lib/site-packages/dotenv/version.py
+++ /dev/null
@@ -1 +0,0 @@
-__version__ = "1.0.1"
diff --git a/myenv/Lib/site-packages/flask-3.0.3.dist-info/INSTALLER b/myenv/Lib/site-packages/flask-3.0.3.dist-info/INSTALLER
deleted file mode 100644
index a1b589e..0000000
--- a/myenv/Lib/site-packages/flask-3.0.3.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/myenv/Lib/site-packages/flask-3.0.3.dist-info/LICENSE.txt b/myenv/Lib/site-packages/flask-3.0.3.dist-info/LICENSE.txt
deleted file mode 100644
index 9d227a0..0000000
--- a/myenv/Lib/site-packages/flask-3.0.3.dist-info/LICENSE.txt
+++ /dev/null
@@ -1,28 +0,0 @@
-Copyright 2010 Pallets
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are
-met:
-
-1.  Redistributions of source code must retain the above copyright
-    notice, this list of conditions and the following disclaimer.
-
-2.  Redistributions in binary form must reproduce the above copyright
-    notice, this list of conditions and the following disclaimer in the
-    documentation and/or other materials provided with the distribution.
-
-3. 
Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/Lib/site-packages/flask-3.0.3.dist-info/METADATA b/myenv/Lib/site-packages/flask-3.0.3.dist-info/METADATA deleted file mode 100644 index 5a02107..0000000 --- a/myenv/Lib/site-packages/flask-3.0.3.dist-info/METADATA +++ /dev/null @@ -1,101 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask -Version: 3.0.3 -Summary: A simple framework for building complex web applications. -Maintainer-email: Pallets -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Framework :: Flask -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI -Classifier: Topic :: Internet :: WWW/HTTP :: WSGI :: Application -Classifier: Topic :: Software Development :: Libraries :: Application Frameworks -Classifier: Typing :: Typed -Requires-Dist: Werkzeug>=3.0.0 -Requires-Dist: Jinja2>=3.1.2 -Requires-Dist: itsdangerous>=2.1.2 -Requires-Dist: click>=8.1.3 -Requires-Dist: blinker>=1.6.2 -Requires-Dist: importlib-metadata>=3.6.0; python_version < '3.10' -Requires-Dist: asgiref>=3.2 ; extra == "async" -Requires-Dist: python-dotenv ; extra == "dotenv" -Project-URL: Changes, https://flask.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://flask.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/flask/ -Provides-Extra: async -Provides-Extra: dotenv - -# Flask - -Flask is a lightweight [WSGI][] web application framework. It is designed -to make getting started quick and easy, with the ability to scale up to -complex applications. It began as a simple wrapper around [Werkzeug][] -and [Jinja][], and has become one of the most popular Python web -application frameworks. - -Flask offers suggestions, but doesn't enforce any dependencies or -project layout. It is up to the developer to choose the tools and -libraries they want to use. There are many extensions provided by the -community that make adding new functionality easy. 
- -[WSGI]: https://wsgi.readthedocs.io/ -[Werkzeug]: https://werkzeug.palletsprojects.com/ -[Jinja]: https://jinja.palletsprojects.com/ - - -## Installing - -Install and update from [PyPI][] using an installer such as [pip][]: - -``` -$ pip install -U Flask -``` - -[PyPI]: https://pypi.org/project/Flask/ -[pip]: https://pip.pypa.io/en/stable/getting-started/ - - -## A Simple Example - -```python -# save this as app.py -from flask import Flask - -app = Flask(__name__) - -@app.route("/") -def hello(): - return "Hello, World!" -``` - -``` -$ flask run - * Running on http://127.0.0.1:5000/ (Press CTRL+C to quit) -``` - - -## Contributing - -For guidance on setting up a development environment and how to make a -contribution to Flask, see the [contributing guidelines][]. - -[contributing guidelines]: https://github.com/pallets/flask/blob/main/CONTRIBUTING.rst - - -## Donate - -The Pallets organization develops and supports Flask and the libraries -it uses. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. - -[please donate today]: https://palletsprojects.com/donate - diff --git a/myenv/Lib/site-packages/flask-3.0.3.dist-info/RECORD b/myenv/Lib/site-packages/flask-3.0.3.dist-info/RECORD deleted file mode 100644 index 2f016b8..0000000 --- a/myenv/Lib/site-packages/flask-3.0.3.dist-info/RECORD +++ /dev/null @@ -1,58 +0,0 @@ -../../Scripts/flask.exe,sha256=9L3IsXb5GDBHTiIEqrptP_L6G4nMFJ7AbcVEpJ4ENlw,108439 -flask-3.0.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask-3.0.3.dist-info/LICENSE.txt,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -flask-3.0.3.dist-info/METADATA,sha256=exPahy4aahjV-mYqd9qb5HNP8haB_IxTuaotoSvCtag,3177 -flask-3.0.3.dist-info/RECORD,, -flask-3.0.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask-3.0.3.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -flask-3.0.3.dist-info/entry_points.txt,sha256=bBP7hTOS5fz9zLtC7sPofBZAlMkEvBxu7KqS6l5lvc4,40 -flask/__init__.py,sha256=6xMqdVA0FIQ2U1KVaGX3lzNCdXPzoHPaa0hvQCNcfSk,2625 -flask/__main__.py,sha256=bYt9eEaoRQWdejEHFD8REx9jxVEdZptECFsV7F49Ink,30 -flask/__pycache__/__init__.cpython-312.pyc,, -flask/__pycache__/__main__.cpython-312.pyc,, -flask/__pycache__/app.cpython-312.pyc,, -flask/__pycache__/blueprints.cpython-312.pyc,, -flask/__pycache__/cli.cpython-312.pyc,, -flask/__pycache__/config.cpython-312.pyc,, -flask/__pycache__/ctx.cpython-312.pyc,, -flask/__pycache__/debughelpers.cpython-312.pyc,, -flask/__pycache__/globals.cpython-312.pyc,, -flask/__pycache__/helpers.cpython-312.pyc,, -flask/__pycache__/logging.cpython-312.pyc,, -flask/__pycache__/sessions.cpython-312.pyc,, -flask/__pycache__/signals.cpython-312.pyc,, -flask/__pycache__/templating.cpython-312.pyc,, -flask/__pycache__/testing.cpython-312.pyc,, -flask/__pycache__/typing.cpython-312.pyc,, -flask/__pycache__/views.cpython-312.pyc,, -flask/__pycache__/wrappers.cpython-312.pyc,, -flask/app.py,sha256=7-lh6cIj27riTE1Q18Ok1p5nOZ8qYiMux4Btc6o6mNc,60143 -flask/blueprints.py,sha256=7INXPwTkUxfOQXOOv1yu52NpHPmPGI5fMTMFZ-BG9yY,4430 -flask/cli.py,sha256=OOaf_Efqih1i2in58j-5ZZZmQnPpaSfiUFbEjlL9bzw,35825 -flask/config.py,sha256=bLzLVAj-cq-Xotu9erqOFte0xSFaVXyfz0AkP4GbwmY,13312 -flask/ctx.py,sha256=4atDhJJ_cpV1VMq4qsfU4E_61M1oN93jlS2H9gjrl58,15120 -flask/debughelpers.py,sha256=PGIDhStW_efRjpaa3zHIpo-htStJOR41Ip3OJWPYBwo,6080 -flask/globals.py,sha256=XdQZmStBmPIs8t93tjx6pO7Bm3gobAaONWkFcUHaGas,1713 
-flask/helpers.py,sha256=tYrcQ_73GuSZVEgwFr-eMmV69UriFQDBmt8wZJIAqvg,23084 -flask/json/__init__.py,sha256=hLNR898paqoefdeAhraa5wyJy-bmRB2k2dV4EgVy2Z8,5602 -flask/json/__pycache__/__init__.cpython-312.pyc,, -flask/json/__pycache__/provider.cpython-312.pyc,, -flask/json/__pycache__/tag.cpython-312.pyc,, -flask/json/provider.py,sha256=q6iB83lSiopy80DZPrU-9mGcWwrD0mvLjiv9fHrRZgc,7646 -flask/json/tag.py,sha256=DhaNwuIOhdt2R74oOC9Y4Z8ZprxFYiRb5dUP5byyINw,9281 -flask/logging.py,sha256=8sM3WMTubi1cBb2c_lPkWpN0J8dMAqrgKRYLLi1dCVI,2377 -flask/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask/sansio/README.md,sha256=-0X1tECnilmz1cogx-YhNw5d7guK7GKrq_DEV2OzlU0,228 -flask/sansio/__pycache__/app.cpython-312.pyc,, -flask/sansio/__pycache__/blueprints.cpython-312.pyc,, -flask/sansio/__pycache__/scaffold.cpython-312.pyc,, -flask/sansio/app.py,sha256=YG5Gf7JVf1c0yccWDZ86q5VSfJUidOVp27HFxFNxC7U,38053 -flask/sansio/blueprints.py,sha256=Tqe-7EkZ-tbWchm8iDoCfD848f0_3nLv6NNjeIPvHwM,24637 -flask/sansio/scaffold.py,sha256=WLV9TRQMMhGlXz-1OKtQ3lv6mtIBQZxdW2HezYrGxoI,30633 -flask/sessions.py,sha256=RU4lzm9MQW9CtH8rVLRTDm8USMJyT4LbvYe7sxM2__k,14807 -flask/signals.py,sha256=V7lMUww7CqgJ2ThUBn1PiatZtQanOyt7OZpu2GZI-34,750 -flask/templating.py,sha256=2TcXLT85Asflm2W9WOSFxKCmYn5e49w_Jkg9-NaaJWo,7537 -flask/testing.py,sha256=3BFXb3bP7R5r-XLBuobhczbxDu8-1LWRzYuhbr-lwaE,10163 -flask/typing.py,sha256=ZavK-wV28Yv8CQB7u73qZp_jLalpbWdrXS37QR1ftN0,3190 -flask/views.py,sha256=B66bTvYBBcHMYk4dA1ScZD0oTRTBl0I5smp1lRm9riI,6939 -flask/wrappers.py,sha256=m1j5tIJxIu8_sPPgTAB_G4TTh52Q-HoDuw_qHV5J59g,5831 diff --git a/myenv/Lib/site-packages/flask-3.0.3.dist-info/REQUESTED b/myenv/Lib/site-packages/flask-3.0.3.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/flask-3.0.3.dist-info/WHEEL b/myenv/Lib/site-packages/flask-3.0.3.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/myenv/Lib/site-packages/flask-3.0.3.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/flask-3.0.3.dist-info/entry_points.txt b/myenv/Lib/site-packages/flask-3.0.3.dist-info/entry_points.txt deleted file mode 100644 index eec6733..0000000 --- a/myenv/Lib/site-packages/flask-3.0.3.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[console_scripts] -flask=flask.cli:main - diff --git a/myenv/Lib/site-packages/flask/__init__.py b/myenv/Lib/site-packages/flask/__init__.py deleted file mode 100644 index e86eb43..0000000 --- a/myenv/Lib/site-packages/flask/__init__.py +++ /dev/null @@ -1,60 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . 
import json as json -from .app import Flask as Flask -from .blueprints import Blueprint as Blueprint -from .config import Config as Config -from .ctx import after_this_request as after_this_request -from .ctx import copy_current_request_context as copy_current_request_context -from .ctx import has_app_context as has_app_context -from .ctx import has_request_context as has_request_context -from .globals import current_app as current_app -from .globals import g as g -from .globals import request as request -from .globals import session as session -from .helpers import abort as abort -from .helpers import flash as flash -from .helpers import get_flashed_messages as get_flashed_messages -from .helpers import get_template_attribute as get_template_attribute -from .helpers import make_response as make_response -from .helpers import redirect as redirect -from .helpers import send_file as send_file -from .helpers import send_from_directory as send_from_directory -from .helpers import stream_with_context as stream_with_context -from .helpers import url_for as url_for -from .json import jsonify as jsonify -from .signals import appcontext_popped as appcontext_popped -from .signals import appcontext_pushed as appcontext_pushed -from .signals import appcontext_tearing_down as appcontext_tearing_down -from .signals import before_render_template as before_render_template -from .signals import got_request_exception as got_request_exception -from .signals import message_flashed as message_flashed -from .signals import request_finished as request_finished -from .signals import request_started as request_started -from .signals import request_tearing_down as request_tearing_down -from .signals import template_rendered as template_rendered -from .templating import render_template as render_template -from .templating import render_template_string as render_template_string -from .templating import stream_template as stream_template -from .templating import stream_template_string as stream_template_string -from .wrappers import Request as Request -from .wrappers import Response as Response - - -def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Flask 3.1. 
Use feature detection or" - " 'importlib.metadata.version(\"flask\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("flask") - - raise AttributeError(name) diff --git a/myenv/Lib/site-packages/flask/__main__.py b/myenv/Lib/site-packages/flask/__main__.py deleted file mode 100644 index 4e28416..0000000 --- a/myenv/Lib/site-packages/flask/__main__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .cli import main - -main() diff --git a/myenv/Lib/site-packages/flask/app.py b/myenv/Lib/site-packages/flask/app.py deleted file mode 100644 index 7622b5e..0000000 --- a/myenv/Lib/site-packages/flask/app.py +++ /dev/null @@ -1,1498 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import os -import sys -import typing as t -import weakref -from datetime import timedelta -from inspect import iscoroutinefunction -from itertools import chain -from types import TracebackType -from urllib.parse import quote as _url_quote - -import click -from werkzeug.datastructures import Headers -from werkzeug.datastructures import ImmutableDict -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.exceptions import HTTPException -from werkzeug.exceptions import InternalServerError -from werkzeug.routing import BuildError -from werkzeug.routing import MapAdapter -from werkzeug.routing import RequestRedirect -from werkzeug.routing import RoutingException -from werkzeug.routing import Rule -from werkzeug.serving import is_running_from_reloader -from werkzeug.wrappers import Response as BaseResponse - -from . import cli -from . import typing as ft -from .ctx import AppContext -from .ctx import RequestContext -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import g -from .globals import request -from .globals import request_ctx -from .globals import session -from .helpers import get_debug_flag -from .helpers import get_flashed_messages -from .helpers import get_load_dotenv -from .helpers import send_from_directory -from .sansio.app import App -from .sansio.scaffold import _sentinel -from .sessions import SecureCookieSessionInterface -from .sessions import SessionInterface -from .signals import appcontext_tearing_down -from .signals import got_request_exception -from .signals import request_finished -from .signals import request_started -from .signals import request_tearing_down -from .templating import Environment -from .wrappers import Request -from .wrappers import Response - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIEnvironment - - from .testing import FlaskClient - from .testing import FlaskCliRunner - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class Flask(App): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. 
Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. - - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. 
- :param instance_path: An alternative instance path for the application. - By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - default_config = ImmutableDict( - { - "DEBUG": None, - "TESTING": False, - "PROPAGATE_EXCEPTIONS": None, - "SECRET_KEY": None, - "PERMANENT_SESSION_LIFETIME": timedelta(days=31), - "USE_X_SENDFILE": False, - "SERVER_NAME": None, - "APPLICATION_ROOT": "/", - "SESSION_COOKIE_NAME": "session", - "SESSION_COOKIE_DOMAIN": None, - "SESSION_COOKIE_PATH": None, - "SESSION_COOKIE_HTTPONLY": True, - "SESSION_COOKIE_SECURE": False, - "SESSION_COOKIE_SAMESITE": None, - "SESSION_REFRESH_EACH_REQUEST": True, - "MAX_CONTENT_LENGTH": None, - "SEND_FILE_MAX_AGE_DEFAULT": None, - "TRAP_BAD_REQUEST_ERRORS": None, - "TRAP_HTTP_EXCEPTIONS": False, - "EXPLAIN_TEMPLATE_LOADING": False, - "PREFERRED_URL_SCHEME": "http", - "TEMPLATES_AUTO_RELOAD": None, - "MAX_COOKIE_SIZE": 4093, - } - ) - - #: The class that is used for request objects. See :class:`~flask.Request` - #: for more information. - request_class: type[Request] = Request - - #: The class that is used for response objects. See - #: :class:`~flask.Response` for more information. - response_class: type[Response] = Response - - #: the session interface to use. By default an instance of - #: :class:`~flask.sessions.SecureCookieSessionInterface` is used here. - #: - #: .. versionadded:: 0.8 - session_interface: SessionInterface = SecureCookieSessionInterface() - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ): - super().__init__( - import_name=import_name, - static_url_path=static_url_path, - static_folder=static_folder, - static_host=static_host, - host_matching=host_matching, - subdomain_matching=subdomain_matching, - template_folder=template_folder, - instance_path=instance_path, - instance_relative_config=instance_relative_config, - root_path=root_path, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = cli.AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - # Add a static route using the provided static_url_path, static_host, - # and static_folder if there is a configured static_folder. - # Note we do this without checking if static_folder exists. - # For one, it might be created while the server is running (e.g. during - # development). 
Also, Google App Engine stores static files somewhere - if self.has_static_folder: - assert ( - bool(static_host) == host_matching - ), "Invalid static_host/host_matching combination" - # Use a weakref to avoid creating a reference cycle between the app - # and the view function (see #3761). - self_ref = weakref.ref(self) - self.add_url_rule( - f"{self.static_url_path}/", - endpoint="static", - host=static_host, - view_func=lambda **kw: self_ref().send_static_file(**kw), # type: ignore # noqa: B950 - ) - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for - reading. - - For example, if the file ``schema.sql`` is next to the file - ``app.py`` where the ``Flask`` app is defined, it can be opened - with: - - .. code-block:: python - - with app.open_resource("schema.sql") as f: - conn.executescript(f.read()) - - :param resource: Path to the resource relative to - :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is - supported, valid values are "r" (or "rt") and "rb". - - Note this is a duplicate of the same method in the Flask - class. - - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - return open(os.path.join(self.root_path, resource), mode) - - def open_instance_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: - """Opens a resource from the application's instance folder - (:attr:`instance_path`). Otherwise works like - :meth:`open_resource`. Instance resources can also be opened for - writing. - - :param resource: the name of the resource. To access resources within - subfolders use forward slashes as separator. - :param mode: resource file opening mode, default is 'rb'. 
- """ - return open(os.path.join(self.instance_path, resource), mode) - - def create_jinja_environment(self) -> Environment: - """Create the Jinja environment based on :attr:`jinja_options` - and the various Jinja-related methods of the app. Changing - :attr:`jinja_options` after this will have no effect. Also adds - Flask-related globals and filters to the environment. - - .. versionchanged:: 0.11 - ``Environment.auto_reload`` set in accordance with - ``TEMPLATES_AUTO_RELOAD`` configuration option. - - .. versionadded:: 0.5 - """ - options = dict(self.jinja_options) - - if "autoescape" not in options: - options["autoescape"] = self.select_jinja_autoescape - - if "auto_reload" not in options: - auto_reload = self.config["TEMPLATES_AUTO_RELOAD"] - - if auto_reload is None: - auto_reload = self.debug - - options["auto_reload"] = auto_reload - - rv = self.jinja_environment(self, **options) - rv.globals.update( - url_for=self.url_for, - get_flashed_messages=get_flashed_messages, - config=self.config, - # request, session and g are normally added with the - # context processor for efficiency reasons but for imported - # templates we also want the proxies in there. - request=request, - session=session, - g=g, - ) - rv.policies["json.dumps_function"] = self.json.dumps - return rv - - def create_url_adapter(self, request: Request | None) -> MapAdapter | None: - """Creates a URL adapter for the given request. The URL adapter - is created at a point where the request context is not yet set - up so the request is passed explicitly. - - .. versionadded:: 0.6 - - .. versionchanged:: 0.9 - This can now also be called without a request object when the - URL adapter is created for the application context. - - .. versionchanged:: 1.0 - :data:`SERVER_NAME` no longer implicitly enables subdomain - matching. Use :attr:`subdomain_matching` instead. - """ - if request is not None: - # If subdomain matching is disabled (the default), use the - # default subdomain in all cases. This should be the default - # in Werkzeug but it currently does not have that feature. - if not self.subdomain_matching: - subdomain = self.url_map.default_subdomain or None - else: - subdomain = None - - return self.url_map.bind_to_environ( - request.environ, - server_name=self.config["SERVER_NAME"], - subdomain=subdomain, - ) - # We need at the very least the server name to be set for this - # to work. - if self.config["SERVER_NAME"] is not None: - return self.url_map.bind( - self.config["SERVER_NAME"], - script_name=self.config["APPLICATION_ROOT"], - url_scheme=self.config["PREFERRED_URL_SCHEME"], - ) - - return None - - def raise_routing_exception(self, request: Request) -> t.NoReturn: - """Intercept routing exceptions and possibly do something else. - - In debug mode, intercept a routing redirect and replace it with - an error if the body will be discarded. - - With modern Werkzeug this shouldn't occur, since it now uses a - 308 status which tells the browser to resend the method and - body. - - .. versionchanged:: 2.1 - Don't intercept 307 and 308 redirects. 
- - :meta private: - :internal: - """ - if ( - not self.debug - or not isinstance(request.routing_exception, RequestRedirect) - or request.routing_exception.code in {307, 308} - or request.method in {"GET", "HEAD", "OPTIONS"} - ): - raise request.routing_exception # type: ignore[misc] - - from .debughelpers import FormDataRoutingRedirect - - raise FormDataRoutingRedirect(request) - - def update_template_context(self, context: dict[str, t.Any]) -> None: - """Update the template context with some commonly used variables. - This injects request, session, config and g into the template - context as well as everything template context processors want - to inject. Note that the as of Flask 0.6, the original values - in the context will not be overridden if a context processor - decides to return a value with the same key. - - :param context: the context as a dictionary that is updated in place - to add extra variables. - """ - names: t.Iterable[str | None] = (None,) - - # A template may be rendered outside a request context. - if request: - names = chain(names, reversed(request.blueprints)) - - # The values passed to render_template take precedence. Keep a - # copy to re-apply after all context functions. - orig_ctx = context.copy() - - for name in names: - if name in self.template_context_processors: - for func in self.template_context_processors[name]: - context.update(self.ensure_sync(func)()) - - context.update(orig_ctx) - - def make_shell_context(self) -> dict[str, t.Any]: - """Returns the shell context for an interactive shell for this - application. This runs all the registered shell context - processors. - - .. versionadded:: 0.11 - """ - rv = {"app": self, "g": g} - for processor in self.shell_context_processors: - rv.update(processor()) - return rv - - def run( - self, - host: str | None = None, - port: int | None = None, - debug: bool | None = None, - load_dotenv: bool = True, - **options: t.Any, - ) -> None: - """Runs the application on a local development server. - - Do not use ``run()`` in a production setting. It is not intended to - meet security and performance requirements for a production server. - Instead, see :doc:`/deploying/index` for WSGI server recommendations. - - If the :attr:`debug` flag is set the server will automatically reload - for code changes and show a debugger in case an exception happened. - - If you want to run the application in debug mode, but disable the - code execution on the interactive debugger, you can pass - ``use_evalex=False`` as parameter. This will keep the debugger's - traceback screen active, but disable code execution. - - It is not recommended to use this function for development with - automatic reloading as this is badly supported. Instead you should - be using the :command:`flask` command line script's ``run`` support. - - .. admonition:: Keep in Mind - - Flask will suppress any server error with a generic error page - unless it is in debug mode. As such to enable just the - interactive debugger without the code reloading, you have to - invoke :meth:`run` with ``debug=True`` and ``use_reloader=False``. - Setting ``use_debugger`` to ``True`` without being in debug mode - won't catch any exceptions because there won't be any to - catch. - - :param host: the hostname to listen on. Set this to ``'0.0.0.0'`` to - have the server available externally as well. Defaults to - ``'127.0.0.1'`` or the host in the ``SERVER_NAME`` config variable - if present. - :param port: the port of the webserver. 
Defaults to ``5000`` or the - port defined in the ``SERVER_NAME`` config variable if present. - :param debug: if given, enable or disable debug mode. See - :attr:`debug`. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param options: the options to be forwarded to the underlying Werkzeug - server. See :func:`werkzeug.serving.run_simple` for more - information. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment - variables from :file:`.env` and :file:`.flaskenv` files. - - The :envvar:`FLASK_DEBUG` environment variable will override :attr:`debug`. - - Threaded mode is enabled by default. - - .. versionchanged:: 0.10 - The default port is now picked from the ``SERVER_NAME`` - variable. - """ - # Ignore this call so that it doesn't start another server if - # the 'flask run' command is used. - if os.environ.get("FLASK_RUN_FROM_CLI") == "true": - if not is_running_from_reloader(): - click.secho( - " * Ignoring a call to 'app.run()' that would block" - " the current 'flask' CLI command.\n" - " Only call 'app.run()' in an 'if __name__ ==" - ' "__main__"\' guard.', - fg="red", - ) - - return - - if get_load_dotenv(load_dotenv): - cli.load_dotenv() - - # if set, env var overrides existing value - if "FLASK_DEBUG" in os.environ: - self.debug = get_debug_flag() - - # debug passed to method overrides all other sources - if debug is not None: - self.debug = bool(debug) - - server_name = self.config.get("SERVER_NAME") - sn_host = sn_port = None - - if server_name: - sn_host, _, sn_port = server_name.partition(":") - - if not host: - if sn_host: - host = sn_host - else: - host = "127.0.0.1" - - if port or port == 0: - port = int(port) - elif sn_port: - port = int(sn_port) - else: - port = 5000 - - options.setdefault("use_reloader", self.debug) - options.setdefault("use_debugger", self.debug) - options.setdefault("threaded", True) - - cli.show_server_banner(self.debug, self.name) - - from werkzeug.serving import run_simple - - try: - run_simple(t.cast(str, host), port, self, **options) - finally: - # reset the first request information if the development server - # reset normally. This makes it possible to restart the server - # without reloader and that stuff from an interactive shell. - self._got_first_request = False - - def test_client(self, use_cookies: bool = True, **kwargs: t.Any) -> FlaskClient: - """Creates a test client for this application. For information - about unit testing head over to :doc:`/testing`. - - Note that if you are testing for assertions or exceptions in your - application code, you must set ``app.testing = True`` in order for the - exceptions to propagate to the test client. Otherwise, the exception - will be handled by the application (not visible to the test client) and - the only indication of an AssertionError or other exception will be a - 500 status code response to the test client. See the :attr:`testing` - attribute. For example:: - - app.testing = True - client = app.test_client() - - The test client can be used in a ``with`` block to defer the closing down - of the context until the end of the ``with`` block. 
This is useful if - you want to access the context locals for testing:: - - with app.test_client() as c: - rv = c.get('/?vodka=42') - assert request.args['vodka'] == '42' - - Additionally, you may pass optional keyword arguments that will then - be passed to the application's :attr:`test_client_class` constructor. - For example:: - - from flask.testing import FlaskClient - - class CustomClient(FlaskClient): - def __init__(self, *args, **kwargs): - self._authentication = kwargs.pop("authentication") - super(CustomClient,self).__init__( *args, **kwargs) - - app.test_client_class = CustomClient - client = app.test_client(authentication='Basic ....') - - See :class:`~flask.testing.FlaskClient` for more information. - - .. versionchanged:: 0.4 - added support for ``with`` block usage for the client. - - .. versionadded:: 0.7 - The `use_cookies` parameter was added as well as the ability - to override the client to be used by setting the - :attr:`test_client_class` attribute. - - .. versionchanged:: 0.11 - Added `**kwargs` to support passing additional keyword arguments to - the constructor of :attr:`test_client_class`. - """ - cls = self.test_client_class - if cls is None: - from .testing import FlaskClient as cls - return cls( # type: ignore - self, self.response_class, use_cookies=use_cookies, **kwargs - ) - - def test_cli_runner(self, **kwargs: t.Any) -> FlaskCliRunner: - """Create a CLI runner for testing CLI commands. - See :ref:`testing-cli`. - - Returns an instance of :attr:`test_cli_runner_class`, by default - :class:`~flask.testing.FlaskCliRunner`. The Flask app object is - passed as the first argument. - - .. versionadded:: 1.0 - """ - cls = self.test_cli_runner_class - - if cls is None: - from .testing import FlaskCliRunner as cls - - return cls(self, **kwargs) # type: ignore - - def handle_http_exception( - self, e: HTTPException - ) -> HTTPException | ft.ResponseReturnValue: - """Handles an HTTP exception. By default this will invoke the - registered error handlers and fall back to returning the - exception as response. - - .. versionchanged:: 1.0.3 - ``RoutingException``, used internally for actions such as - slash redirects during routing, is not passed to error - handlers. - - .. versionchanged:: 1.0 - Exceptions are looked up by code *and* by MRO, so - ``HTTPException`` subclasses can be handled with a catch-all - handler for the base ``HTTPException``. - - .. versionadded:: 0.3 - """ - # Proxy exceptions don't have error codes. We want to always return - # those unchanged as errors - if e.code is None: - return e - - # RoutingExceptions are used internally to trigger routing - # actions, such as slash redirects raising RequestRedirect. They - # are not raised or handled in user code. - if isinstance(e, RoutingException): - return e - - handler = self._find_error_handler(e, request.blueprints) - if handler is None: - return e - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_user_exception( - self, e: Exception - ) -> HTTPException | ft.ResponseReturnValue: - """This method is called whenever an exception occurs that - should be handled. A special case is :class:`~werkzeug - .exceptions.HTTPException` which is forwarded to the - :meth:`handle_http_exception` method. This function will either - return a response value or reraise the exception with the same - traceback. - - .. versionchanged:: 1.0 - Key errors raised from request data like ``form`` show the - bad key in debug mode rather than a generic bad request - message. - - .. 
versionadded:: 0.7 - """ - if isinstance(e, BadRequestKeyError) and ( - self.debug or self.config["TRAP_BAD_REQUEST_ERRORS"] - ): - e.show_exception = True - - if isinstance(e, HTTPException) and not self.trap_http_exception(e): - return self.handle_http_exception(e) - - handler = self._find_error_handler(e, request.blueprints) - - if handler is None: - raise - - return self.ensure_sync(handler)(e) # type: ignore[no-any-return] - - def handle_exception(self, e: Exception) -> Response: - """Handle an exception that did not have an error handler - associated with it, or that was raised from an error handler. - This always causes a 500 ``InternalServerError``. - - Always sends the :data:`got_request_exception` signal. - - If :data:`PROPAGATE_EXCEPTIONS` is ``True``, such as in debug - mode, the error will be re-raised so that the debugger can - display it. Otherwise, the original exception is logged, and - an :exc:`~werkzeug.exceptions.InternalServerError` is returned. - - If an error handler is registered for ``InternalServerError`` or - ``500``, it will be used. For consistency, the handler will - always receive the ``InternalServerError``. The original - unhandled exception is available as ``e.original_exception``. - - .. versionchanged:: 1.1.0 - Always passes the ``InternalServerError`` instance to the - handler, setting ``original_exception`` to the unhandled - error. - - .. versionchanged:: 1.1.0 - ``after_request`` functions and other finalization is done - even for the default 500 response when there is no handler. - - .. versionadded:: 0.3 - """ - exc_info = sys.exc_info() - got_request_exception.send(self, _async_wrapper=self.ensure_sync, exception=e) - propagate = self.config["PROPAGATE_EXCEPTIONS"] - - if propagate is None: - propagate = self.testing or self.debug - - if propagate: - # Re-raise if called with an active exception, otherwise - # raise the passed in exception. - if exc_info[1] is e: - raise - - raise e - - self.log_exception(exc_info) - server_error: InternalServerError | ft.ResponseReturnValue - server_error = InternalServerError(original_exception=e) - handler = self._find_error_handler(server_error, request.blueprints) - - if handler is not None: - server_error = self.ensure_sync(handler)(server_error) - - return self.finalize_request(server_error, from_error_handler=True) - - def log_exception( - self, - exc_info: (tuple[type, BaseException, TracebackType] | tuple[None, None, None]), - ) -> None: - """Logs an exception. This is called by :meth:`handle_exception` - if debugging is disabled and right before the handler is called. - The default implementation logs the exception as error on the - :attr:`logger`. - - .. versionadded:: 0.8 - """ - self.logger.error( - f"Exception on {request.path} [{request.method}]", exc_info=exc_info - ) - - def dispatch_request(self) -> ft.ResponseReturnValue: - """Does the request dispatching. Matches the URL and returns the - return value of the view or error handler. This does not have to - be a response object. In order to convert the return value to a - proper response object, call :func:`make_response`. - - .. versionchanged:: 0.7 - This no longer does the exception handling, this code was - moved to the new :meth:`full_dispatch_request`. 
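# A minimal sketch of the handler lookup described above (the app and
# handlers here are illustrative, not part of the removed files):
# handle_http_exception() matches by status code and by HTTPException
# subclass via the MRO, while other exceptions are routed through
# handle_user_exception() first.
from flask import Flask, request

demo = Flask(__name__)

@demo.errorhandler(404)
def not_found(e):
    return {"error": "not found", "path": request.path}, 404

@demo.errorhandler(KeyError)
def missing_key(e):
    return {"error": "missing key"}, 400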
- """ - req = request_ctx.request - if req.routing_exception is not None: - self.raise_routing_exception(req) - rule: Rule = req.url_rule # type: ignore[assignment] - # if we provide automatic options for this URL and the - # request came with the OPTIONS method, reply automatically - if ( - getattr(rule, "provide_automatic_options", False) - and req.method == "OPTIONS" - ): - return self.make_default_options_response() - # otherwise dispatch to the handler for that endpoint - view_args: dict[str, t.Any] = req.view_args # type: ignore[assignment] - return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args) # type: ignore[no-any-return] - - def full_dispatch_request(self) -> Response: - """Dispatches the request and on top of that performs request - pre and postprocessing as well as HTTP exception catching and - error handling. - - .. versionadded:: 0.7 - """ - self._got_first_request = True - - try: - request_started.send(self, _async_wrapper=self.ensure_sync) - rv = self.preprocess_request() - if rv is None: - rv = self.dispatch_request() - except Exception as e: - rv = self.handle_user_exception(e) - return self.finalize_request(rv) - - def finalize_request( - self, - rv: ft.ResponseReturnValue | HTTPException, - from_error_handler: bool = False, - ) -> Response: - """Given the return value from a view function this finalizes - the request by converting it into a response and invoking the - postprocessing functions. This is invoked for both normal - request dispatching as well as error handlers. - - Because this means that it might be called as a result of a - failure a special safe mode is available which can be enabled - with the `from_error_handler` flag. If enabled, failures in - response processing will be logged and otherwise ignored. - - :internal: - """ - response = self.make_response(rv) - try: - response = self.process_response(response) - request_finished.send( - self, _async_wrapper=self.ensure_sync, response=response - ) - except Exception: - if not from_error_handler: - raise - self.logger.exception( - "Request finalizing failed with an error while handling an error" - ) - return response - - def make_default_options_response(self) -> Response: - """This method is called to create the default ``OPTIONS`` response. - This can be changed through subclassing to change the default - behavior of ``OPTIONS`` responses. - - .. versionadded:: 0.7 - """ - adapter = request_ctx.url_adapter - methods = adapter.allowed_methods() # type: ignore[union-attr] - rv = self.response_class() - rv.allow.update(methods) - return rv - - def ensure_sync(self, func: t.Callable[..., t.Any]) -> t.Callable[..., t.Any]: - """Ensure that the function is synchronous for WSGI workers. - Plain ``def`` functions are returned as-is. ``async def`` - functions are wrapped to run and wait for the response. - - Override this method to change how the app runs async views. - - .. versionadded:: 2.0 - """ - if iscoroutinefunction(func): - return self.async_to_sync(func) - - return func - - def async_to_sync( - self, func: t.Callable[..., t.Coroutine[t.Any, t.Any, t.Any]] - ) -> t.Callable[..., t.Any]: - """Return a sync function that will run the coroutine function. - - .. code-block:: python - - result = app.async_to_sync(func)(*args, **kwargs) - - Override this method to change how the app converts async code - to be synchronously callable. - - .. 
versionadded:: 2.0 - """ - try: - from asgiref.sync import async_to_sync as asgiref_async_to_sync - except ImportError: - raise RuntimeError( - "Install Flask with the 'async' extra in order to use async views." - ) from None - - return asgiref_async_to_sync(func) - - def url_for( - self, - /, - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, - ) -> str: - """Generate a URL to the given endpoint with the given values. - - This is called by :func:`flask.url_for`, and can be called - directly as well. - - An *endpoint* is the name of a URL rule, usually added with - :meth:`@app.route() `, and usually the same name as the - view function. A route defined in a :class:`~flask.Blueprint` - will prepend the blueprint's name separated by a ``.`` to the - endpoint. - - In some cases, such as email messages, you want URLs to include - the scheme and domain, like ``https://example.com/hello``. When - not in an active request, URLs will be external by default, but - this requires setting :data:`SERVER_NAME` so Flask knows what - domain to use. :data:`APPLICATION_ROOT` and - :data:`PREFERRED_URL_SCHEME` should also be configured as - needed. This config is only used when not in an active request. - - Functions can be decorated with :meth:`url_defaults` to modify - keyword arguments before the URL is built. - - If building fails for some reason, such as an unknown endpoint - or incorrect values, the app's :meth:`handle_url_build_error` - method is called. If that returns a string, that is returned, - otherwise a :exc:`~werkzeug.routing.BuildError` is raised. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it - is external. - :param _external: If given, prefer the URL to be internal - (False) or require it to be external (True). External URLs - include the scheme and domain. When not in an active - request, URLs are external by default. - :param values: Values to use for the variable parts of the URL - rule. Unknown keys are appended as query string arguments, - like ``?a=b&c=d``. - - .. versionadded:: 2.2 - Moved from ``flask.url_for``, which calls this method. - """ - req_ctx = _cv_request.get(None) - - if req_ctx is not None: - url_adapter = req_ctx.url_adapter - blueprint_name = req_ctx.request.blueprint - - # If the endpoint starts with "." and the request matches a - # blueprint, the endpoint is relative to the blueprint. - if endpoint[:1] == ".": - if blueprint_name is not None: - endpoint = f"{blueprint_name}{endpoint}" - else: - endpoint = endpoint[1:] - - # When in a request, generate a URL without scheme and - # domain by default, unless a scheme is given. - if _external is None: - _external = _scheme is not None - else: - app_ctx = _cv_app.get(None) - - # If called by helpers.url_for, an app context is active, - # use its url_adapter. Otherwise, app.url_for was called - # directly, build an adapter. - if app_ctx is not None: - url_adapter = app_ctx.url_adapter - else: - url_adapter = self.create_url_adapter(None) - - if url_adapter is None: - raise RuntimeError( - "Unable to build URLs outside an active request" - " without 'SERVER_NAME' configured. 
Also configure" - " 'APPLICATION_ROOT' and 'PREFERRED_URL_SCHEME' as" - " needed." - ) - - # When outside a request, generate a URL with scheme and - # domain by default. - if _external is None: - _external = True - - # It is an error to set _scheme when _external=False, in order - # to avoid accidental insecure URLs. - if _scheme is not None and not _external: - raise ValueError("When specifying '_scheme', '_external' must be True.") - - self.inject_url_defaults(endpoint, values) - - try: - rv = url_adapter.build( # type: ignore[union-attr] - endpoint, - values, - method=_method, - url_scheme=_scheme, - force_external=_external, - ) - except BuildError as error: - values.update( - _anchor=_anchor, _method=_method, _scheme=_scheme, _external=_external - ) - return self.handle_url_build_error(error, endpoint, values) - - if _anchor is not None: - _anchor = _url_quote(_anchor, safe="%!#$&'()*+,/:;=?@") - rv = f"{rv}#{_anchor}" - - return rv - - def make_response(self, rv: ft.ResponseReturnValue) -> Response: - """Convert the return value from a view function to an instance of - :attr:`response_class`. - - :param rv: the return value from the view function. The view function - must return a response. Returning ``None``, or the view ending - without returning, is not allowed. The following types are allowed - for ``view_rv``: - - ``str`` - A response object is created with the string encoded to UTF-8 - as the body. - - ``bytes`` - A response object is created with the bytes as the body. - - ``dict`` - A dictionary that will be jsonify'd before being returned. - - ``list`` - A list that will be jsonify'd before being returned. - - ``generator`` or ``iterator`` - A generator that returns ``str`` or ``bytes`` to be - streamed as the response. - - ``tuple`` - Either ``(body, status, headers)``, ``(body, status)``, or - ``(body, headers)``, where ``body`` is any of the other types - allowed here, ``status`` is a string or an integer, and - ``headers`` is a dictionary or a list of ``(key, value)`` - tuples. If ``body`` is a :attr:`response_class` instance, - ``status`` overwrites the exiting value and ``headers`` are - extended. - - :attr:`response_class` - The object is returned unchanged. - - other :class:`~werkzeug.wrappers.Response` class - The object is coerced to :attr:`response_class`. - - :func:`callable` - The function is called as a WSGI application. The result is - used to create a response object. - - .. versionchanged:: 2.2 - A generator will be converted to a streaming response. - A list will be converted to a JSON response. - - .. versionchanged:: 1.1 - A dict will be converted to a JSON response. - - .. versionchanged:: 0.9 - Previously a tuple was interpreted as the arguments for the - response object. - """ - - status = headers = None - - # unpack tuple returns - if isinstance(rv, tuple): - len_rv = len(rv) - - # a 3-tuple is unpacked directly - if len_rv == 3: - rv, status, headers = rv # type: ignore[misc] - # decide if a 2-tuple has status or headers - elif len_rv == 2: - if isinstance(rv[1], (Headers, dict, tuple, list)): - rv, headers = rv - else: - rv, status = rv # type: ignore[assignment,misc] - # other sized tuples are not allowed - else: - raise TypeError( - "The view function did not return a valid response tuple." - " The tuple must have the form (body, status, headers)," - " (body, status), or (body, headers)." - ) - - # the body must not be None - if rv is None: - raise TypeError( - f"The view function for {request.endpoint!r} did not" - " return a valid response. 
The function either returned" - " None or ended without a return statement." - ) - - # make sure the body is an instance of the response class - if not isinstance(rv, self.response_class): - if isinstance(rv, (str, bytes, bytearray)) or isinstance(rv, cabc.Iterator): - # let the response class set the status and headers instead of - # waiting to do it manually, so that the class can handle any - # special logic - rv = self.response_class( - rv, - status=status, - headers=headers, # type: ignore[arg-type] - ) - status = headers = None - elif isinstance(rv, (dict, list)): - rv = self.json.response(rv) - elif isinstance(rv, BaseResponse) or callable(rv): - # evaluate a WSGI callable, or coerce a different response - # class to the correct type - try: - rv = self.response_class.force_type( - rv, # type: ignore[arg-type] - request.environ, - ) - except TypeError as e: - raise TypeError( - f"{e}\nThe view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it" - f" was a {type(rv).__name__}." - ).with_traceback(sys.exc_info()[2]) from None - else: - raise TypeError( - "The view function did not return a valid" - " response. The return type must be a string," - " dict, list, tuple with headers or status," - " Response instance, or WSGI callable, but it was a" - f" {type(rv).__name__}." - ) - - rv = t.cast(Response, rv) - # prefer the status if it was provided - if status is not None: - if isinstance(status, (str, bytes, bytearray)): - rv.status = status - else: - rv.status_code = status - - # extend existing headers with provided headers - if headers: - rv.headers.update(headers) # type: ignore[arg-type] - - return rv - - def preprocess_request(self) -> ft.ResponseReturnValue | None: - """Called before the request is dispatched. Calls - :attr:`url_value_preprocessors` registered with the app and the - current blueprint (if any). Then calls :attr:`before_request_funcs` - registered with the app and the blueprint. - - If any :meth:`before_request` handler returns a non-None value, the - value is handled as if it was the return value from the view, and - further request handling is stopped. - """ - names = (None, *reversed(request.blueprints)) - - for name in names: - if name in self.url_value_preprocessors: - for url_func in self.url_value_preprocessors[name]: - url_func(request.endpoint, request.view_args) - - for name in names: - if name in self.before_request_funcs: - for before_func in self.before_request_funcs[name]: - rv = self.ensure_sync(before_func)() - - if rv is not None: - return rv # type: ignore[no-any-return] - - return None - - def process_response(self, response: Response) -> Response: - """Can be overridden in order to modify the response object - before it's sent to the WSGI server. By default this will - call all the :meth:`after_request` decorated functions. - - .. versionchanged:: 0.5 - As of Flask 0.5 the functions registered for after request - execution are called in reverse order of registration. - - :param response: a :attr:`response_class` object. - :return: a new response object or the same, has to be an - instance of :attr:`response_class`. 
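# A short sketch of the return-value conversions make_response() performs,
# as listed above (the app and view names are illustrative):
from flask import Flask

demo = Flask(__name__)

@demo.route("/text")
def text():
    return "created", 201, {"X-Example": "1"}  # (body, status, headers) tuple

@demo.route("/data")
def data():
    return {"ok": True}  # dict is serialized to a JSON response

@demo.route("/stream")
def stream():
    return (f"row {i}\n" for i in range(3))  # generator becomes a streamed body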
- """ - ctx = request_ctx._get_current_object() # type: ignore[attr-defined] - - for func in ctx._after_request_functions: - response = self.ensure_sync(func)(response) - - for name in chain(request.blueprints, (None,)): - if name in self.after_request_funcs: - for func in reversed(self.after_request_funcs[name]): - response = self.ensure_sync(func)(response) - - if not self.session_interface.is_null_session(ctx.session): - self.session_interface.save_session(self, ctx.session, response) - - return response - - def do_teardown_request( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called after the request is dispatched and the response is - returned, right before the request context is popped. - - This calls all functions decorated with - :meth:`teardown_request`, and :meth:`Blueprint.teardown_request` - if a blueprint handled the request. Finally, the - :data:`request_tearing_down` signal is sent. - - This is called by - :meth:`RequestContext.pop() `, - which may be delayed during testing to maintain access to - resources. - - :param exc: An unhandled exception raised while dispatching the - request. Detected from the current exception information if - not passed. Passed to each teardown function. - - .. versionchanged:: 0.9 - Added the ``exc`` argument. - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for name in chain(request.blueprints, (None,)): - if name in self.teardown_request_funcs: - for func in reversed(self.teardown_request_funcs[name]): - self.ensure_sync(func)(exc) - - request_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def do_teardown_appcontext( - self, - exc: BaseException | None = _sentinel, # type: ignore[assignment] - ) -> None: - """Called right before the application context is popped. - - When handling a request, the application context is popped - after the request context. See :meth:`do_teardown_request`. - - This calls all functions decorated with - :meth:`teardown_appcontext`. Then the - :data:`appcontext_tearing_down` signal is sent. - - This is called by - :meth:`AppContext.pop() `. - - .. versionadded:: 0.9 - """ - if exc is _sentinel: - exc = sys.exc_info()[1] - - for func in reversed(self.teardown_appcontext_funcs): - self.ensure_sync(func)(exc) - - appcontext_tearing_down.send(self, _async_wrapper=self.ensure_sync, exc=exc) - - def app_context(self) -> AppContext: - """Create an :class:`~flask.ctx.AppContext`. Use as a ``with`` - block to push the context, which will make :data:`current_app` - point at this application. - - An application context is automatically pushed by - :meth:`RequestContext.push() ` - when handling a request, and when running a CLI command. Use - this to manually create a context outside of these situations. - - :: - - with app.app_context(): - init_db() - - See :doc:`/appcontext`. - - .. versionadded:: 0.9 - """ - return AppContext(self) - - def request_context(self, environ: WSGIEnvironment) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` representing a - WSGI environment. Use a ``with`` block to push the context, - which will make :data:`request` point at this request. - - See :doc:`/reqcontext`. - - Typically you should not call this from your own code. A request - context is automatically pushed by the :meth:`wsgi_app` when - handling a request. Use :meth:`test_request_context` to create - an environment and context instead of this method. 
- - :param environ: a WSGI environment - """ - return RequestContext(self, environ) - - def test_request_context(self, *args: t.Any, **kwargs: t.Any) -> RequestContext: - """Create a :class:`~flask.ctx.RequestContext` for a WSGI - environment created from the given values. This is mostly useful - during testing, where you may want to run a function that uses - request data without dispatching a full request. - - See :doc:`/reqcontext`. - - Use a ``with`` block to push the context, which will make - :data:`request` point at the request for the created - environment. :: - - with app.test_request_context(...): - generate_report() - - When using the shell, it may be easier to push and pop the - context manually to avoid indentation. :: - - ctx = app.test_request_context(...) - ctx.push() - ... - ctx.pop() - - Takes the same arguments as Werkzeug's - :class:`~werkzeug.test.EnvironBuilder`, with some defaults from - the application. See the linked Werkzeug docs for most of the - available arguments. Flask-specific behavior is listed here. - - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to - :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. - :param data: The request body, either as a string or a dict of - form keys and values. - :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - from .testing import EnvironBuilder - - builder = EnvironBuilder(self, *args, **kwargs) - - try: - return self.request_context(builder.get_environ()) - finally: - builder.close() - - def wsgi_app( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The actual WSGI application. This is not implemented in - :meth:`__call__` so that middlewares can be applied without - losing a reference to the app object. Instead of doing this:: - - app = MyMiddleware(app) - - It's a better idea to do this instead:: - - app.wsgi_app = MyMiddleware(app.wsgi_app) - - Then you still have the original application object around and - can continue to call methods on it. - - .. versionchanged:: 0.7 - Teardown events for the request and app contexts are called - even if an unhandled error occurs. Other events may not be - called depending on when an error occurs during dispatch. - See :ref:`callbacks-and-errors`. - - :param environ: A WSGI environment. - :param start_response: A callable accepting a status code, - a list of headers, and an optional exception context to - start the response. 
- """ - ctx = self.request_context(environ) - error: BaseException | None = None - try: - try: - ctx.push() - response = self.full_dispatch_request() - except Exception as e: - error = e - response = self.handle_exception(e) - except: # noqa: B001 - error = sys.exc_info()[1] - raise - return response(environ, start_response) - finally: - if "werkzeug.debug.preserve_context" in environ: - environ["werkzeug.debug.preserve_context"](_cv_app.get()) - environ["werkzeug.debug.preserve_context"](_cv_request.get()) - - if error is not None and self.should_ignore_error(error): - error = None - - ctx.pop(error) - - def __call__( - self, environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - """The WSGI server calls the Flask application object as the - WSGI application. This calls :meth:`wsgi_app`, which can be - wrapped to apply middleware. - """ - return self.wsgi_app(environ, start_response) diff --git a/myenv/Lib/site-packages/flask/blueprints.py b/myenv/Lib/site-packages/flask/blueprints.py deleted file mode 100644 index aa9eacf..0000000 --- a/myenv/Lib/site-packages/flask/blueprints.py +++ /dev/null @@ -1,129 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from datetime import timedelta - -from .cli import AppGroup -from .globals import current_app -from .helpers import send_from_directory -from .sansio.blueprints import Blueprint as SansioBlueprint -from .sansio.blueprints import BlueprintSetupState as BlueprintSetupState # noqa -from .sansio.scaffold import _sentinel - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -class Blueprint(SansioBlueprint): - def __init__( - self, - name: str, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - url_prefix: str | None = None, - subdomain: str | None = None, - url_defaults: dict[str, t.Any] | None = None, - root_path: str | None = None, - cli_group: str | None = _sentinel, # type: ignore - ) -> None: - super().__init__( - name, - import_name, - static_folder, - static_url_path, - template_folder, - url_prefix, - subdomain, - url_defaults, - root_path, - cli_group, - ) - - #: The Click command group for registering CLI commands for this - #: object. The commands are available from the ``flask`` command - #: once the application has been discovered and blueprints have - #: been registered. - self.cli = AppGroup() - - # Set the name of the Click group in case someone wants to add - # the app's commands to another CLI tool. - self.cli.name = self.name - - def get_send_file_max_age(self, filename: str | None) -> int | None: - """Used by :func:`send_file` to determine the ``max_age`` cache - value for a given file path if it wasn't passed. - - By default, this returns :data:`SEND_FILE_MAX_AGE_DEFAULT` from - the configuration of :data:`~flask.current_app`. This defaults - to ``None``, which tells the browser to use conditional requests - instead of a timed cache, which is usually preferable. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionchanged:: 2.0 - The default configuration is ``None`` instead of 12 hours. - - .. 
versionadded:: 0.9 - """ - value = current_app.config["SEND_FILE_MAX_AGE_DEFAULT"] - - if value is None: - return None - - if isinstance(value, timedelta): - return int(value.total_seconds()) - - return value # type: ignore[no-any-return] - - def send_static_file(self, filename: str) -> Response: - """The view function used to serve files from - :attr:`static_folder`. A route is automatically registered for - this view at :attr:`static_url_path` if :attr:`static_folder` is - set. - - Note this is a duplicate of the same method in the Flask - class. - - .. versionadded:: 0.5 - - """ - if not self.has_static_folder: - raise RuntimeError("'static_folder' must be set to serve static_files.") - - # send_file only knows to call get_send_file_max_age on the app, - # call it here so it works for blueprints too. - max_age = self.get_send_file_max_age(filename) - return send_from_directory( - t.cast(str, self.static_folder), filename, max_age=max_age - ) - - def open_resource(self, resource: str, mode: str = "rb") -> t.IO[t.AnyStr]: - """Open a resource file relative to :attr:`root_path` for - reading. - - For example, if the file ``schema.sql`` is next to the file - ``app.py`` where the ``Flask`` app is defined, it can be opened - with: - - .. code-block:: python - - with app.open_resource("schema.sql") as f: - conn.executescript(f.read()) - - :param resource: Path to the resource relative to - :attr:`root_path`. - :param mode: Open the file in this mode. Only reading is - supported, valid values are "r" (or "rt") and "rb". - - Note this is a duplicate of the same method in the Flask - class. - - """ - if mode not in {"r", "rt", "rb"}: - raise ValueError("Resources can only be opened for reading.") - - return open(os.path.join(self.root_path, resource), mode) diff --git a/myenv/Lib/site-packages/flask/cli.py b/myenv/Lib/site-packages/flask/cli.py deleted file mode 100644 index ecb292a..0000000 --- a/myenv/Lib/site-packages/flask/cli.py +++ /dev/null @@ -1,1109 +0,0 @@ -from __future__ import annotations - -import ast -import collections.abc as cabc -import importlib.metadata -import inspect -import os -import platform -import re -import sys -import traceback -import typing as t -from functools import update_wrapper -from operator import itemgetter -from types import ModuleType - -import click -from click.core import ParameterSource -from werkzeug import run_simple -from werkzeug.serving import is_running_from_reloader -from werkzeug.utils import import_string - -from .globals import current_app -from .helpers import get_debug_flag -from .helpers import get_load_dotenv - -if t.TYPE_CHECKING: - import ssl - - from _typeshed.wsgi import StartResponse - from _typeshed.wsgi import WSGIApplication - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - - -class NoAppException(click.UsageError): - """Raised if an application cannot be found or loaded.""" - - -def find_best_app(module: ModuleType) -> Flask: - """Given a module instance this tries to find the best possible - application in the module or raises an exception. - """ - from . import Flask - - # Search for the most common names first. - for attr_name in ("app", "application"): - app = getattr(module, attr_name, None) - - if isinstance(app, Flask): - return app - - # Otherwise find the only object that is a Flask instance. 
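# A sketch of the discovery order implemented above (module name and
# contents are illustrative): given this file as hello.py, "flask --app
# hello run" finds "app" by its conventional name before falling back to
# the single Flask instance or a create_app()/make_app() factory.
from flask import Flask

app = Flask(__name__)

@app.route("/")
def index():
    return "hello"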
- matches = [v for v in module.__dict__.values() if isinstance(v, Flask)] - - if len(matches) == 1: - return matches[0] - elif len(matches) > 1: - raise NoAppException( - "Detected multiple Flask applications in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify the correct one." - ) - - # Search for app factory functions. - for attr_name in ("create_app", "make_app"): - app_factory = getattr(module, attr_name, None) - - if inspect.isfunction(app_factory): - try: - app = app_factory() - - if isinstance(app, Flask): - return app - except TypeError as e: - if not _called_with_wrong_args(app_factory): - raise - - raise NoAppException( - f"Detected factory '{attr_name}' in module '{module.__name__}'," - " but could not call it without arguments. Use" - f" '{module.__name__}:{attr_name}(args)'" - " to specify arguments." - ) from e - - raise NoAppException( - "Failed to find Flask application or factory in module" - f" '{module.__name__}'. Use '{module.__name__}:name'" - " to specify one." - ) - - -def _called_with_wrong_args(f: t.Callable[..., Flask]) -> bool: - """Check whether calling a function raised a ``TypeError`` because - the call failed or because something in the factory raised the - error. - - :param f: The function that was called. - :return: ``True`` if the call failed. - """ - tb = sys.exc_info()[2] - - try: - while tb is not None: - if tb.tb_frame.f_code is f.__code__: - # In the function, it was called successfully. - return False - - tb = tb.tb_next - - # Didn't reach the function. - return True - finally: - # Delete tb to break a circular reference. - # https://docs.python.org/2/library/sys.html#sys.exc_info - del tb - - -def find_app_by_string(module: ModuleType, app_name: str) -> Flask: - """Check if the given string is a variable name or a function. Call - a function to get the app instance, or return the variable directly. - """ - from . import Flask - - # Parse app_name as a single expression to determine if it's a valid - # attribute name or function call. - try: - expr = ast.parse(app_name.strip(), mode="eval").body - except SyntaxError: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) from None - - if isinstance(expr, ast.Name): - name = expr.id - args = [] - kwargs = {} - elif isinstance(expr, ast.Call): - # Ensure the function name is an attribute name only. - if not isinstance(expr.func, ast.Name): - raise NoAppException( - f"Function reference must be a simple name: {app_name!r}." - ) - - name = expr.func.id - - # Parse the positional and keyword arguments as literals. - try: - args = [ast.literal_eval(arg) for arg in expr.args] - kwargs = { - kw.arg: ast.literal_eval(kw.value) - for kw in expr.keywords - if kw.arg is not None - } - except ValueError: - # literal_eval gives cryptic error messages, show a generic - # message with the full expression instead. - raise NoAppException( - f"Failed to parse arguments as literal values: {app_name!r}." - ) from None - else: - raise NoAppException( - f"Failed to parse {app_name!r} as an attribute name or function call." - ) - - try: - attr = getattr(module, name) - except AttributeError as e: - raise NoAppException( - f"Failed to find attribute {name!r} in {module.__name__!r}." - ) from e - - # If the attribute is a function, call it with any args and kwargs - # to get the real application. 
- if inspect.isfunction(attr): - try: - app = attr(*args, **kwargs) - except TypeError as e: - if not _called_with_wrong_args(attr): - raise - - raise NoAppException( - f"The factory {app_name!r} in module" - f" {module.__name__!r} could not be called with the" - " specified arguments." - ) from e - else: - app = attr - - if isinstance(app, Flask): - return app - - raise NoAppException( - "A valid Flask application was not obtained from" - f" '{module.__name__}:{app_name}'." - ) - - -def prepare_import(path: str) -> str: - """Given a filename this will try to calculate the python path, add it - to the search path and return the actual module name that is expected. - """ - path = os.path.realpath(path) - - fname, ext = os.path.splitext(path) - if ext == ".py": - path = fname - - if os.path.basename(path) == "__init__": - path = os.path.dirname(path) - - module_name = [] - - # move up until outside package structure (no __init__.py) - while True: - path, name = os.path.split(path) - module_name.append(name) - - if not os.path.exists(os.path.join(path, "__init__.py")): - break - - if sys.path[0] != path: - sys.path.insert(0, path) - - return ".".join(module_name[::-1]) - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[True] = True -) -> Flask: ... - - -@t.overload -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: t.Literal[False] = ... -) -> Flask | None: ... - - -def locate_app( - module_name: str, app_name: str | None, raise_if_not_found: bool = True -) -> Flask | None: - try: - __import__(module_name) - except ImportError: - # Reraise the ImportError if it occurred within the imported module. - # Determine this by checking whether the trace has a depth > 1. - if sys.exc_info()[2].tb_next: # type: ignore[union-attr] - raise NoAppException( - f"While importing {module_name!r}, an ImportError was" - f" raised:\n\n{traceback.format_exc()}" - ) from None - elif raise_if_not_found: - raise NoAppException(f"Could not import {module_name!r}.") from None - else: - return None - - module = sys.modules[module_name] - - if app_name is None: - return find_best_app(module) - else: - return find_app_by_string(module, app_name) - - -def get_version(ctx: click.Context, param: click.Parameter, value: t.Any) -> None: - if not value or ctx.resilient_parsing: - return - - flask_version = importlib.metadata.version("flask") - werkzeug_version = importlib.metadata.version("werkzeug") - - click.echo( - f"Python {platform.python_version()}\n" - f"Flask {flask_version}\n" - f"Werkzeug {werkzeug_version}", - color=ctx.color, - ) - ctx.exit() - - -version_option = click.Option( - ["--version"], - help="Show the Flask version.", - expose_value=False, - callback=get_version, - is_flag=True, - is_eager=True, -) - - -class ScriptInfo: - """Helper object to deal with Flask applications. This is usually not - necessary to interface with as it's used internally in the dispatching - to click. In future versions of Flask this object will most likely play - a bigger role. Typically it's created automatically by the - :class:`FlaskGroup` but you can also manually create it and pass it - onwards as click object. - """ - - def __init__( - self, - app_import_path: str | None = None, - create_app: t.Callable[..., Flask] | None = None, - set_debug_flag: bool = True, - ) -> None: - #: Optionally the import path for the Flask application. 
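A usage sketch for prepare_import above, assuming a hypothetical src/pkg package containing __init__.py files:

from flask.cli import prepare_import

# Walks up past each __init__.py, prepends "src" to sys.path, and
# returns the dotted module name expected by locate_app.
module_name = prepare_import("src/pkg/app.py")  # -> "pkg.app"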
- self.app_import_path = app_import_path - #: Optionally a function that is passed the script info to create - #: the instance of the application. - self.create_app = create_app - #: A dictionary with arbitrary data that can be associated with - #: this script info. - self.data: dict[t.Any, t.Any] = {} - self.set_debug_flag = set_debug_flag - self._loaded_app: Flask | None = None - - def load_app(self) -> Flask: - """Loads the Flask app (if not yet loaded) and returns it. Calling - this multiple times will just result in the already loaded app to - be returned. - """ - if self._loaded_app is not None: - return self._loaded_app - - if self.create_app is not None: - app: Flask | None = self.create_app() - else: - if self.app_import_path: - path, name = ( - re.split(r":(?![\\/])", self.app_import_path, maxsplit=1) + [None] - )[:2] - import_name = prepare_import(path) - app = locate_app(import_name, name) - else: - for path in ("wsgi.py", "app.py"): - import_name = prepare_import(path) - app = locate_app(import_name, None, raise_if_not_found=False) - - if app is not None: - break - - if app is None: - raise NoAppException( - "Could not locate a Flask application. Use the" - " 'flask --app' option, 'FLASK_APP' environment" - " variable, or a 'wsgi.py' or 'app.py' file in the" - " current directory." - ) - - if self.set_debug_flag: - # Update the app's debug flag through the descriptor so that - # other values repopulate as well. - app.debug = get_debug_flag() - - self._loaded_app = app - return app - - -pass_script_info = click.make_pass_decorator(ScriptInfo, ensure=True) - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def with_appcontext(f: F) -> F: - """Wraps a callback so that it's guaranteed to be executed with the - script's application context. - - Custom commands (and their options) registered under ``app.cli`` or - ``blueprint.cli`` will always have an app context available, this - decorator is not required in that case. - - .. versionchanged:: 2.2 - The app context is active for subcommands as well as the - decorated callback. The app context is always available to - ``app.cli`` command and parameter callbacks. - """ - - @click.pass_context - def decorator(ctx: click.Context, /, *args: t.Any, **kwargs: t.Any) -> t.Any: - if not current_app: - app = ctx.ensure_object(ScriptInfo).load_app() - ctx.with_resource(app.app_context()) - - return ctx.invoke(f, *args, **kwargs) - - return update_wrapper(decorator, f) # type: ignore[return-value] - - -class AppGroup(click.Group): - """This works similar to a regular click :class:`~click.Group` but it - changes the behavior of the :meth:`command` decorator so that it - automatically wraps the functions in :func:`with_appcontext`. - - Not to be confused with :class:`FlaskGroup`. - """ - - def command( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Command]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it wraps callbacks in :func:`with_appcontext` - unless it's disabled by passing ``with_appcontext=False``. 
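The note above says commands registered under app.cli always run inside an app context; a minimal sketch of that pattern (the command name is an assumption):

import click
from flask import Flask, current_app

app = Flask(__name__)

@app.cli.command("report-name")   # wrapped in with_appcontext automatically
def report_name() -> None:
    click.echo(current_app.name)  # the context is already pushed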
- """ - wrap_for_ctx = kwargs.pop("with_appcontext", True) - - def decorator(f: t.Callable[..., t.Any]) -> click.Command: - if wrap_for_ctx: - f = with_appcontext(f) - return super(AppGroup, self).command(*args, **kwargs)(f) # type: ignore[no-any-return] - - return decorator - - def group( # type: ignore[override] - self, *args: t.Any, **kwargs: t.Any - ) -> t.Callable[[t.Callable[..., t.Any]], click.Group]: - """This works exactly like the method of the same name on a regular - :class:`click.Group` but it defaults the group class to - :class:`AppGroup`. - """ - kwargs.setdefault("cls", AppGroup) - return super().group(*args, **kwargs) # type: ignore[no-any-return] - - -def _set_app(ctx: click.Context, param: click.Option, value: str | None) -> str | None: - if value is None: - return None - - info = ctx.ensure_object(ScriptInfo) - info.app_import_path = value - return value - - -# This option is eager so the app will be available if --help is given. -# --help is also eager, so --app must be before it in the param list. -# no_args_is_help bypasses eager processing, so this option must be -# processed manually in that case to ensure FLASK_APP gets picked up. -_app_option = click.Option( - ["-A", "--app"], - metavar="IMPORT", - help=( - "The Flask application or factory function to load, in the form 'module:name'." - " Module can be a dotted import or file path. Name is not required if it is" - " 'app', 'application', 'create_app', or 'make_app', and can be 'name(args)' to" - " pass arguments." - ), - is_eager=True, - expose_value=False, - callback=_set_app, -) - - -def _set_debug(ctx: click.Context, param: click.Option, value: bool) -> bool | None: - # If the flag isn't provided, it will default to False. Don't use - # that, let debug be set by env in that case. - source = ctx.get_parameter_source(param.name) # type: ignore[arg-type] - - if source is not None and source in ( - ParameterSource.DEFAULT, - ParameterSource.DEFAULT_MAP, - ): - return None - - # Set with env var instead of ScriptInfo.load so that it can be - # accessed early during a factory function. - os.environ["FLASK_DEBUG"] = "1" if value else "0" - return value - - -_debug_option = click.Option( - ["--debug/--no-debug"], - help="Set debug mode.", - expose_value=False, - callback=_set_debug, -) - - -def _env_file_callback( - ctx: click.Context, param: click.Option, value: str | None -) -> str | None: - if value is None: - return None - - import importlib - - try: - importlib.import_module("dotenv") - except ImportError: - raise click.BadParameter( - "python-dotenv must be installed to load an env file.", - ctx=ctx, - param=param, - ) from None - - # Don't check FLASK_SKIP_DOTENV, that only disables automatically - # loading .env and .flaskenv files. - load_dotenv(value) - return value - - -# This option is eager so env vars are loaded as early as possible to be -# used by other options. -_env_file_option = click.Option( - ["-e", "--env-file"], - type=click.Path(exists=True, dir_okay=False), - help="Load environment variables from this file. python-dotenv must be installed.", - is_eager=True, - expose_value=False, - callback=_env_file_callback, -) - - -class FlaskGroup(AppGroup): - """Special subclass of the :class:`AppGroup` group that supports - loading more commands from the configured Flask app. Normally a - developer does not have to interface with this class but there are - some very advanced use cases for which it makes sense to create an - instance of this. see :ref:`custom-scripts`. 
- - :param add_default_commands: if this is True then the default run and - shell commands will be added. - :param add_version_option: adds the ``--version`` option. - :param create_app: an optional callback that is passed the script info and - returns the loaded app. - :param load_dotenv: Load the nearest :file:`.env` and :file:`.flaskenv` - files to set environment variables. Will also change the working - directory to the directory containing the first file found. - :param set_debug_flag: Set the app's debug flag. - - .. versionchanged:: 2.2 - Added the ``-A/--app``, ``--debug/--no-debug``, ``-e/--env-file`` options. - - .. versionchanged:: 2.2 - An app context is pushed when running ``app.cli`` commands, so - ``@with_appcontext`` is no longer required for those commands. - - .. versionchanged:: 1.0 - If installed, python-dotenv will be used to load environment variables - from :file:`.env` and :file:`.flaskenv` files. - """ - - def __init__( - self, - add_default_commands: bool = True, - create_app: t.Callable[..., Flask] | None = None, - add_version_option: bool = True, - load_dotenv: bool = True, - set_debug_flag: bool = True, - **extra: t.Any, - ) -> None: - params = list(extra.pop("params", None) or ()) - # Processing is done with option callbacks instead of a group - # callback. This allows users to make a custom group callback - # without losing the behavior. --env-file must come first so - # that it is eagerly evaluated before --app. - params.extend((_env_file_option, _app_option, _debug_option)) - - if add_version_option: - params.append(version_option) - - if "context_settings" not in extra: - extra["context_settings"] = {} - - extra["context_settings"].setdefault("auto_envvar_prefix", "FLASK") - - super().__init__(params=params, **extra) - - self.create_app = create_app - self.load_dotenv = load_dotenv - self.set_debug_flag = set_debug_flag - - if add_default_commands: - self.add_command(run_command) - self.add_command(shell_command) - self.add_command(routes_command) - - self._loaded_plugin_commands = False - - def _load_plugin_commands(self) -> None: - if self._loaded_plugin_commands: - return - - if sys.version_info >= (3, 10): - from importlib import metadata - else: - # Use a backport on Python < 3.10. We technically have - # importlib.metadata on 3.8+, but the API changed in 3.10, - # so use the backport for consistency. - import importlib_metadata as metadata - - for ep in metadata.entry_points(group="flask.commands"): - self.add_command(ep.load(), ep.name) - - self._loaded_plugin_commands = True - - def get_command(self, ctx: click.Context, name: str) -> click.Command | None: - self._load_plugin_commands() - # Look up built-in and plugin commands, which should be - # available even if the app fails to load. - rv = super().get_command(ctx, name) - - if rv is not None: - return rv - - info = ctx.ensure_object(ScriptInfo) - - # Look up commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - app = info.load_app() - except NoAppException as e: - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - return None - - # Push an app context for the loaded app unless it is already - # active somehow. This makes the context available to parameter - # and command callbacks without needing @with_appcontext. 
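A minimal sketch of the advanced use case mentioned above, a standalone management script built on FlaskGroup; the app and factory names are assumptions:

import click
from flask import Flask
from flask.cli import FlaskGroup

def create_app() -> Flask:
    return Flask("example")

@click.group(cls=FlaskGroup, create_app=create_app)
def cli() -> None:
    """Management script for the example application."""

if __name__ == "__main__":
    cli()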
- if not current_app or current_app._get_current_object() is not app: # type: ignore[attr-defined] - ctx.with_resource(app.app_context()) - - return app.cli.get_command(ctx, name) - - def list_commands(self, ctx: click.Context) -> list[str]: - self._load_plugin_commands() - # Start with the built-in and plugin commands. - rv = set(super().list_commands(ctx)) - info = ctx.ensure_object(ScriptInfo) - - # Add commands provided by the app, showing an error and - # continuing if the app couldn't be loaded. - try: - rv.update(info.load_app().cli.list_commands(ctx)) - except NoAppException as e: - # When an app couldn't be loaded, show the error message - # without the traceback. - click.secho(f"Error: {e.format_message()}\n", err=True, fg="red") - except Exception: - # When any other errors occurred during loading, show the - # full traceback. - click.secho(f"{traceback.format_exc()}\n", err=True, fg="red") - - return sorted(rv) - - def make_context( - self, - info_name: str | None, - args: list[str], - parent: click.Context | None = None, - **extra: t.Any, - ) -> click.Context: - # Set a flag to tell app.run to become a no-op. If app.run was - # not in a __name__ == __main__ guard, it would start the server - # when importing, blocking whatever command is being called. - os.environ["FLASK_RUN_FROM_CLI"] = "true" - - # Attempt to load .env and .flask env files. The --env-file - # option can cause another file to be loaded. - if get_load_dotenv(self.load_dotenv): - load_dotenv() - - if "obj" not in extra and "obj" not in self.context_settings: - extra["obj"] = ScriptInfo( - create_app=self.create_app, set_debug_flag=self.set_debug_flag - ) - - return super().make_context(info_name, args, parent=parent, **extra) - - def parse_args(self, ctx: click.Context, args: list[str]) -> list[str]: - if not args and self.no_args_is_help: - # Attempt to load --env-file and --app early in case they - # were given as env vars. Otherwise no_args_is_help will not - # see commands from app.cli. - _env_file_option.handle_parse_result(ctx, {}, []) - _app_option.handle_parse_result(ctx, {}, []) - - return super().parse_args(ctx, args) - - -def _path_is_ancestor(path: str, other: str) -> bool: - """Take ``other`` and remove the length of ``path`` from it. Then join it - to ``path``. If it is the original value, ``path`` is an ancestor of - ``other``.""" - return os.path.join(path, other[len(path) :].lstrip(os.sep)) == other - - -def load_dotenv(path: str | os.PathLike[str] | None = None) -> bool: - """Load "dotenv" files in order of precedence to set environment variables. - - If an env var is already set it is not overwritten, so earlier files in the - list are preferred over later files. - - This is a no-op if `python-dotenv`_ is not installed. - - .. _python-dotenv: https://github.com/theskumar/python-dotenv#readme - - :param path: Load the file at this location instead of searching. - :return: ``True`` if a file was loaded. - - .. versionchanged:: 2.0 - The current directory is not changed to the location of the - loaded file. - - .. versionchanged:: 2.0 - When loading the env files, set the default encoding to UTF-8. - - .. versionchanged:: 1.1.0 - Returns ``False`` when python-dotenv is not installed, or when - the given path isn't a file. - - .. versionadded:: 1.0 - """ - try: - import dotenv - except ImportError: - if path or os.path.isfile(".env") or os.path.isfile(".flaskenv"): - click.secho( - " * Tip: There are .env or .flaskenv files present." 
- ' Do "pip install python-dotenv" to use them.', - fg="yellow", - err=True, - ) - - return False - - # Always return after attempting to load a given path, don't load - # the default files. - if path is not None: - if os.path.isfile(path): - return dotenv.load_dotenv(path, encoding="utf-8") - - return False - - loaded = False - - for name in (".env", ".flaskenv"): - path = dotenv.find_dotenv(name, usecwd=True) - - if not path: - continue - - dotenv.load_dotenv(path, encoding="utf-8") - loaded = True - - return loaded # True if at least one file was located and loaded. - - -def show_server_banner(debug: bool, app_import_path: str | None) -> None: - """Show extra startup messages the first time the server is run, - ignoring the reloader. - """ - if is_running_from_reloader(): - return - - if app_import_path is not None: - click.echo(f" * Serving Flask app '{app_import_path}'") - - if debug is not None: - click.echo(f" * Debug mode: {'on' if debug else 'off'}") - - -class CertParamType(click.ParamType): - """Click option type for the ``--cert`` option. Allows either an - existing file, the string ``'adhoc'``, or an import for a - :class:`~ssl.SSLContext` object. - """ - - name = "path" - - def __init__(self) -> None: - self.path_type = click.Path(exists=True, dir_okay=False, resolve_path=True) - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - try: - import ssl - except ImportError: - raise click.BadParameter( - 'Using "--cert" requires Python to be compiled with SSL support.', - ctx, - param, - ) from None - - try: - return self.path_type(value, param, ctx) - except click.BadParameter: - value = click.STRING(value, param, ctx).lower() - - if value == "adhoc": - try: - import cryptography # noqa: F401 - except ImportError: - raise click.BadParameter( - "Using ad-hoc certificates requires the cryptography library.", - ctx, - param, - ) from None - - return value - - obj = import_string(value, silent=True) - - if isinstance(obj, ssl.SSLContext): - return obj - - raise - - -def _validate_key(ctx: click.Context, param: click.Parameter, value: t.Any) -> t.Any: - """The ``--key`` option must be specified when ``--cert`` is a file. - Modifies the ``cert`` param to be a ``(cert, key)`` pair if needed. - """ - cert = ctx.params.get("cert") - is_adhoc = cert == "adhoc" - - try: - import ssl - except ImportError: - is_context = False - else: - is_context = isinstance(cert, ssl.SSLContext) - - if value is not None: - if is_adhoc: - raise click.BadParameter( - 'When "--cert" is "adhoc", "--key" is not used.', ctx, param - ) - - if is_context: - raise click.BadParameter( - 'When "--cert" is an SSLContext object, "--key" is not used.', - ctx, - param, - ) - - if not cert: - raise click.BadParameter('"--cert" must also be specified.', ctx, param) - - ctx.params["cert"] = cert, value - - else: - if cert and not (is_adhoc or is_context): - raise click.BadParameter('Required when using "--cert".', ctx, param) - - return value - - -class SeparatedPathType(click.Path): - """Click option type that accepts a list of values separated by the - OS's path separator (``:``, ``;`` on Windows). Each value is - validated as a :class:`click.Path` type. 
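A usage sketch for load_dotenv above; the .env.local filename is an assumption, and python-dotenv must be installed for either call to load anything:

from flask.cli import load_dotenv

loaded = load_dotenv()      # find .env then .flaskenv; already-set vars are not overwritten
load_dotenv(".env.local")   # or load one explicit file; returns False if it is missing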
- """ - - def convert( - self, value: t.Any, param: click.Parameter | None, ctx: click.Context | None - ) -> t.Any: - items = self.split_envvar_value(value) - # can't call no-arg super() inside list comprehension until Python 3.12 - super_convert = super().convert - return [super_convert(item, param, ctx) for item in items] - - -@click.command("run", short_help="Run a development server.") -@click.option("--host", "-h", default="127.0.0.1", help="The interface to bind to.") -@click.option("--port", "-p", default=5000, help="The port to bind to.") -@click.option( - "--cert", - type=CertParamType(), - help="Specify a certificate file to use HTTPS.", - is_eager=True, -) -@click.option( - "--key", - type=click.Path(exists=True, dir_okay=False, resolve_path=True), - callback=_validate_key, - expose_value=False, - help="The key file to use when specifying a certificate.", -) -@click.option( - "--reload/--no-reload", - default=None, - help="Enable or disable the reloader. By default the reloader " - "is active if debug is enabled.", -) -@click.option( - "--debugger/--no-debugger", - default=None, - help="Enable or disable the debugger. By default the debugger " - "is active if debug is enabled.", -) -@click.option( - "--with-threads/--without-threads", - default=True, - help="Enable or disable multithreading.", -) -@click.option( - "--extra-files", - default=None, - type=SeparatedPathType(), - help=( - "Extra files that trigger a reload on change. Multiple paths" - f" are separated by {os.path.pathsep!r}." - ), -) -@click.option( - "--exclude-patterns", - default=None, - type=SeparatedPathType(), - help=( - "Files matching these fnmatch patterns will not trigger a reload" - " on change. Multiple patterns are separated by" - f" {os.path.pathsep!r}." - ), -) -@pass_script_info -def run_command( - info: ScriptInfo, - host: str, - port: int, - reload: bool, - debugger: bool, - with_threads: bool, - cert: ssl.SSLContext | tuple[str, str | None] | t.Literal["adhoc"] | None, - extra_files: list[str] | None, - exclude_patterns: list[str] | None, -) -> None: - """Run a local development server. - - This server is for development purposes only. It does not provide - the stability, security, or performance of production WSGI servers. - - The reloader and debugger are enabled by default with the '--debug' - option. - """ - try: - app: WSGIApplication = info.load_app() - except Exception as e: - if is_running_from_reloader(): - # When reloading, print out the error immediately, but raise - # it later so the debugger or server can handle it. - traceback.print_exc() - err = e - - def app( - environ: WSGIEnvironment, start_response: StartResponse - ) -> cabc.Iterable[bytes]: - raise err from None - - else: - # When not reloading, raise the error immediately so the - # command fails. - raise e from None - - debug = get_debug_flag() - - if reload is None: - reload = debug - - if debugger is None: - debugger = debug - - show_server_banner(debug, info.app_import_path) - - run_simple( - host, - port, - app, - use_reloader=reload, - use_debugger=debugger, - threaded=with_threads, - ssl_context=cert, - extra_files=extra_files, - exclude_patterns=exclude_patterns, - ) - - -run_command.params.insert(0, _debug_option) - - -@click.command("shell", short_help="Run a shell in the app context.") -@with_appcontext -def shell_command() -> None: - """Run an interactive Python shell in the context of a given - Flask application. The application will populate the default - namespace of this shell according to its configuration. 
- - This is useful for executing small snippets of management code - without having to manually configure the application. - """ - import code - - banner = ( - f"Python {sys.version} on {sys.platform}\n" - f"App: {current_app.import_name}\n" - f"Instance: {current_app.instance_path}" - ) - ctx: dict[str, t.Any] = {} - - # Support the regular Python interpreter startup script if someone - # is using it. - startup = os.environ.get("PYTHONSTARTUP") - if startup and os.path.isfile(startup): - with open(startup) as f: - eval(compile(f.read(), startup, "exec"), ctx) - - ctx.update(current_app.make_shell_context()) - - # Site, customize, or startup script can set a hook to call when - # entering interactive mode. The default one sets up readline with - # tab and history completion. - interactive_hook = getattr(sys, "__interactivehook__", None) - - if interactive_hook is not None: - try: - import readline - from rlcompleter import Completer - except ImportError: - pass - else: - # rlcompleter uses __main__.__dict__ by default, which is - # flask.__main__. Use the shell context instead. - readline.set_completer(Completer(ctx).complete) - - interactive_hook() - - code.interact(banner=banner, local=ctx) - - -@click.command("routes", short_help="Show the routes for the app.") -@click.option( - "--sort", - "-s", - type=click.Choice(("endpoint", "methods", "domain", "rule", "match")), - default="endpoint", - help=( - "Method to sort routes by. 'match' is the order that Flask will match routes" - " when dispatching a request." - ), -) -@click.option("--all-methods", is_flag=True, help="Show HEAD and OPTIONS methods.") -@with_appcontext -def routes_command(sort: str, all_methods: bool) -> None: - """Show all registered routes with endpoints and methods.""" - rules = list(current_app.url_map.iter_rules()) - - if not rules: - click.echo("No routes were registered.") - return - - ignored_methods = set() if all_methods else {"HEAD", "OPTIONS"} - host_matching = current_app.url_map.host_matching - has_domain = any(rule.host if host_matching else rule.subdomain for rule in rules) - rows = [] - - for rule in rules: - row = [ - rule.endpoint, - ", ".join(sorted((rule.methods or set()) - ignored_methods)), - ] - - if has_domain: - row.append((rule.host if host_matching else rule.subdomain) or "") - - row.append(rule.rule) - rows.append(row) - - headers = ["Endpoint", "Methods"] - sorts = ["endpoint", "methods"] - - if has_domain: - headers.append("Host" if host_matching else "Subdomain") - sorts.append("domain") - - headers.append("Rule") - sorts.append("rule") - - try: - rows.sort(key=itemgetter(sorts.index(sort))) - except ValueError: - pass - - rows.insert(0, headers) - widths = [max(len(row[i]) for row in rows) for i in range(len(headers))] - rows.insert(1, ["-" * w for w in widths]) - template = " ".join(f"{{{i}:<{w}}}" for i, w in enumerate(widths)) - - for row in rows: - click.echo(template.format(*row)) - - -cli = FlaskGroup( - name="flask", - help="""\ -A general utility script for Flask applications. - -An application to load must be given with the '--app' option, -'FLASK_APP' environment variable, or with a 'wsgi.py' or 'app.py' file -in the current directory. 
-""", -) - - -def main() -> None: - cli.main() - - -if __name__ == "__main__": - main() diff --git a/myenv/Lib/site-packages/flask/config.py b/myenv/Lib/site-packages/flask/config.py deleted file mode 100644 index 7e3ba17..0000000 --- a/myenv/Lib/site-packages/flask/config.py +++ /dev/null @@ -1,370 +0,0 @@ -from __future__ import annotations - -import errno -import json -import os -import types -import typing as t - -from werkzeug.utils import import_string - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .sansio.app import App - - -T = t.TypeVar("T") - - -class ConfigAttribute(t.Generic[T]): - """Makes an attribute forward to the config""" - - def __init__( - self, name: str, get_converter: t.Callable[[t.Any], T] | None = None - ) -> None: - self.__name__ = name - self.get_converter = get_converter - - @t.overload - def __get__(self, obj: None, owner: None) -> te.Self: ... - - @t.overload - def __get__(self, obj: App, owner: type[App]) -> T: ... - - def __get__(self, obj: App | None, owner: type[App] | None = None) -> T | te.Self: - if obj is None: - return self - - rv = obj.config[self.__name__] - - if self.get_converter is not None: - rv = self.get_converter(rv) - - return rv # type: ignore[no-any-return] - - def __set__(self, obj: App, value: t.Any) -> None: - obj.config[self.__name__] = value - - -class Config(dict): # type: ignore[type-arg] - """Works exactly like a dict but provides ways to fill it from files - or special dictionaries. There are two common patterns to populate the - config. - - Either you can fill the config from a config file:: - - app.config.from_pyfile('yourconfig.cfg') - - Or alternatively you can define the configuration options in the - module that calls :meth:`from_object` or provide an import path to - a module that should be loaded. It is also possible to tell it to - use the same module and with that provide the configuration values - just before the call:: - - DEBUG = True - SECRET_KEY = 'development key' - app.config.from_object(__name__) - - In both cases (loading from any Python file or loading from modules), - only uppercase keys are added to the config. This makes it possible to use - lowercase values in the config file for temporary values that are not added - to the config or to define the config keys in the same file that implements - the application. - - Probably the most interesting way to load configurations is from an - environment variable pointing to a file:: - - app.config.from_envvar('YOURAPPLICATION_SETTINGS') - - In this case before launching the application you have to set this - environment variable to the file you want to use. On Linux and OS X - use the export statement:: - - export YOURAPPLICATION_SETTINGS='/path/to/config/file' - - On windows use `set` instead. - - :param root_path: path to which files are read relative from. When the - config object is created by the application, this is - the application's :attr:`~flask.Flask.root_path`. - :param defaults: an optional dictionary of default values - """ - - def __init__( - self, - root_path: str | os.PathLike[str], - defaults: dict[str, t.Any] | None = None, - ) -> None: - super().__init__(defaults or {}) - self.root_path = root_path - - def from_envvar(self, variable_name: str, silent: bool = False) -> bool: - """Loads a configuration from an environment variable pointing to - a configuration file. 
This is basically just a shortcut with nicer - error messages for this line of code:: - - app.config.from_pyfile(os.environ['YOURAPPLICATION_SETTINGS']) - - :param variable_name: name of the environment variable - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - """ - rv = os.environ.get(variable_name) - if not rv: - if silent: - return False - raise RuntimeError( - f"The environment variable {variable_name!r} is not set" - " and as such configuration could not be loaded. Set" - " this variable and make it point to a configuration" - " file" - ) - return self.from_pyfile(rv, silent=silent) - - def from_prefixed_env( - self, prefix: str = "FLASK", *, loads: t.Callable[[str], t.Any] = json.loads - ) -> bool: - """Load any environment variables that start with ``FLASK_``, - dropping the prefix from the env key for the config key. Values - are passed through a loading function to attempt to convert them - to more specific types than strings. - - Keys are loaded in :func:`sorted` order. - - The default loading function attempts to parse values as any - valid JSON type, including dicts and lists. - - Specific items in nested dicts can be set by separating the - keys with double underscores (``__``). If an intermediate key - doesn't exist, it will be initialized to an empty dict. - - :param prefix: Load env vars that start with this prefix, - separated with an underscore (``_``). - :param loads: Pass each string value to this function and use - the returned value as the config value. If any error is - raised it is ignored and the value remains a string. The - default is :func:`json.loads`. - - .. versionadded:: 2.1 - """ - prefix = f"{prefix}_" - len_prefix = len(prefix) - - for key in sorted(os.environ): - if not key.startswith(prefix): - continue - - value = os.environ[key] - - try: - value = loads(value) - except Exception: - # Keep the value as a string if loading failed. - pass - - # Change to key.removeprefix(prefix) on Python >= 3.9. - key = key[len_prefix:] - - if "__" not in key: - # A non-nested key, set directly. - self[key] = value - continue - - # Traverse nested dictionaries with keys separated by "__". - current = self - *parts, tail = key.split("__") - - for part in parts: - # If an intermediate dict does not exist, create it. - if part not in current: - current[part] = {} - - current = current[part] - - current[tail] = value - - return True - - def from_pyfile( - self, filename: str | os.PathLike[str], silent: bool = False - ) -> bool: - """Updates the values in the config from a Python file. This function - behaves as if the file was imported as module with the - :meth:`from_object` function. - - :param filename: the filename of the config. This can either be an - absolute filename or a filename relative to the - root path. - :param silent: set to ``True`` if you want silent failure for missing - files. - :return: ``True`` if the file was loaded successfully. - - .. versionadded:: 0.7 - `silent` parameter. 
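A runnable sketch of the double-underscore nesting from_prefixed_env describes above; the FLASK_MAIL__PORT variable is an assumption:

import os
from flask import Flask

os.environ["FLASK_MAIL__PORT"] = "25"  # json.loads turns "25" into the int 25

app = Flask(__name__)
app.config.from_prefixed_env()
assert app.config["MAIL"]["PORT"] == 25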
- """ - filename = os.path.join(self.root_path, filename) - d = types.ModuleType("config") - d.__file__ = filename - try: - with open(filename, mode="rb") as config_file: - exec(compile(config_file.read(), filename, "exec"), d.__dict__) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR, errno.ENOTDIR): - return False - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - self.from_object(d) - return True - - def from_object(self, obj: object | str) -> None: - """Updates the values from the given object. An object can be of one - of the following two types: - - - a string: in this case the object with that name will be imported - - an actual object reference: that object is used directly - - Objects are usually either modules or classes. :meth:`from_object` - loads only the uppercase attributes of the module/class. A ``dict`` - object will not work with :meth:`from_object` because the keys of a - ``dict`` are not attributes of the ``dict`` class. - - Example of module-based configuration:: - - app.config.from_object('yourapplication.default_config') - from yourapplication import default_config - app.config.from_object(default_config) - - Nothing is done to the object before loading. If the object is a - class and has ``@property`` attributes, it needs to be - instantiated before being passed to this method. - - You should not use this function to load the actual configuration but - rather configuration defaults. The actual config should be loaded - with :meth:`from_pyfile` and ideally from a location not within the - package because the package might be installed system wide. - - See :ref:`config-dev-prod` for an example of class-based configuration - using :meth:`from_object`. - - :param obj: an import name or object - """ - if isinstance(obj, str): - obj = import_string(obj) - for key in dir(obj): - if key.isupper(): - self[key] = getattr(obj, key) - - def from_file( - self, - filename: str | os.PathLike[str], - load: t.Callable[[t.IO[t.Any]], t.Mapping[str, t.Any]], - silent: bool = False, - text: bool = True, - ) -> bool: - """Update the values in the config from a file that is loaded - using the ``load`` parameter. The loaded data is passed to the - :meth:`from_mapping` method. - - .. code-block:: python - - import json - app.config.from_file("config.json", load=json.load) - - import tomllib - app.config.from_file("config.toml", load=tomllib.load, text=False) - - :param filename: The path to the data file. This can be an - absolute path or relative to the config root path. - :param load: A callable that takes a file handle and returns a - mapping of loaded data from the file. - :type load: ``Callable[[Reader], Mapping]`` where ``Reader`` - implements a ``read`` method. - :param silent: Ignore the file if it doesn't exist. - :param text: Open the file in text or binary mode. - :return: ``True`` if the file was loaded successfully. - - .. versionchanged:: 2.3 - The ``text`` parameter was added. - - .. versionadded:: 2.0 - """ - filename = os.path.join(self.root_path, filename) - - try: - with open(filename, "r" if text else "rb") as f: - obj = load(f) - except OSError as e: - if silent and e.errno in (errno.ENOENT, errno.EISDIR): - return False - - e.strerror = f"Unable to load configuration file ({e.strerror})" - raise - - return self.from_mapping(obj) - - def from_mapping( - self, mapping: t.Mapping[str, t.Any] | None = None, **kwargs: t.Any - ) -> bool: - """Updates the config like :meth:`update` ignoring items with - non-upper keys. 
- - :return: Always returns ``True``. - - .. versionadded:: 0.11 - """ - mappings: dict[str, t.Any] = {} - if mapping is not None: - mappings.update(mapping) - mappings.update(kwargs) - for key, value in mappings.items(): - if key.isupper(): - self[key] = value - return True - - def get_namespace( - self, namespace: str, lowercase: bool = True, trim_namespace: bool = True - ) -> dict[str, t.Any]: - """Returns a dictionary containing a subset of configuration options - that match the specified namespace/prefix. Example usage:: - - app.config['IMAGE_STORE_TYPE'] = 'fs' - app.config['IMAGE_STORE_PATH'] = '/var/app/images' - app.config['IMAGE_STORE_BASE_URL'] = 'http://img.website.com' - image_store_config = app.config.get_namespace('IMAGE_STORE_') - - The resulting dictionary `image_store_config` would look like:: - - { - 'type': 'fs', - 'path': '/var/app/images', - 'base_url': 'http://img.website.com' - } - - This is often useful when configuration options map directly to - keyword arguments in functions or class constructors. - - :param namespace: a configuration namespace - :param lowercase: a flag indicating if the keys of the resulting - dictionary should be lowercase - :param trim_namespace: a flag indicating if the keys of the resulting - dictionary should not include the namespace - - .. versionadded:: 0.11 - """ - rv = {} - for k, v in self.items(): - if not k.startswith(namespace): - continue - if trim_namespace: - key = k[len(namespace) :] - else: - key = k - if lowercase: - key = key.lower() - rv[key] = v - return rv - - def __repr__(self) -> str: - return f"<{type(self).__name__} {dict.__repr__(self)}>" diff --git a/myenv/Lib/site-packages/flask/ctx.py b/myenv/Lib/site-packages/flask/ctx.py deleted file mode 100644 index 9b164d3..0000000 --- a/myenv/Lib/site-packages/flask/ctx.py +++ /dev/null @@ -1,449 +0,0 @@ -from __future__ import annotations - -import contextvars -import sys -import typing as t -from functools import update_wrapper -from types import TracebackType - -from werkzeug.exceptions import HTTPException - -from . import typing as ft -from .globals import _cv_app -from .globals import _cv_request -from .signals import appcontext_popped -from .signals import appcontext_pushed - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - - from .app import Flask - from .sessions import SessionMixin - from .wrappers import Request - - -# a singleton sentinel value for parameter defaults -_sentinel = object() - - -class _AppCtxGlobals: - """A plain object. Used as a namespace for storing data during an - application context. - - Creating an app context automatically creates this object, which is - made available as the :data:`g` proxy. - - .. describe:: 'key' in g - - Check whether an attribute is present. - - .. versionadded:: 0.10 - - .. describe:: iter(g) - - Return an iterator over the attribute names. - - .. versionadded:: 0.10 - """ - - # Define attr methods to let mypy know this is a namespace object - # that has arbitrary attributes. - - def __getattr__(self, name: str) -> t.Any: - try: - return self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def __setattr__(self, name: str, value: t.Any) -> None: - self.__dict__[name] = value - - def __delattr__(self, name: str) -> None: - try: - del self.__dict__[name] - except KeyError: - raise AttributeError(name) from None - - def get(self, name: str, default: t.Any | None = None) -> t.Any: - """Get an attribute by name, or a default value. Like - :meth:`dict.get`. 
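A sketch of the keyword-argument pattern get_namespace above is designed for; open_store is a hypothetical consumer:

from flask import Flask

app = Flask(__name__)
app.config["IMAGE_STORE_TYPE"] = "fs"
app.config["IMAGE_STORE_PATH"] = "/var/app/images"

def open_store(type: str, path: str) -> tuple[str, str]:  # hypothetical
    return (type, path)

store = open_store(**app.config.get_namespace("IMAGE_STORE_"))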
- - :param name: Name of attribute to get. - :param default: Value to return if the attribute is not present. - - .. versionadded:: 0.10 - """ - return self.__dict__.get(name, default) - - def pop(self, name: str, default: t.Any = _sentinel) -> t.Any: - """Get and remove an attribute by name. Like :meth:`dict.pop`. - - :param name: Name of attribute to pop. - :param default: Value to return if the attribute is not present, - instead of raising a ``KeyError``. - - .. versionadded:: 0.11 - """ - if default is _sentinel: - return self.__dict__.pop(name) - else: - return self.__dict__.pop(name, default) - - def setdefault(self, name: str, default: t.Any = None) -> t.Any: - """Get the value of an attribute if it is present, otherwise - set and return a default value. Like :meth:`dict.setdefault`. - - :param name: Name of attribute to get. - :param default: Value to set and return if the attribute is not - present. - - .. versionadded:: 0.11 - """ - return self.__dict__.setdefault(name, default) - - def __contains__(self, item: str) -> bool: - return item in self.__dict__ - - def __iter__(self) -> t.Iterator[str]: - return iter(self.__dict__) - - def __repr__(self) -> str: - ctx = _cv_app.get(None) - if ctx is not None: - return f"" - return object.__repr__(self) - - -def after_this_request( - f: ft.AfterRequestCallable[t.Any], -) -> ft.AfterRequestCallable[t.Any]: - """Executes a function after this request. This is useful to modify - response objects. The function is passed the response object and has - to return the same or a new one. - - Example:: - - @app.route('/') - def index(): - @after_this_request - def add_header(response): - response.headers['X-Foo'] = 'Parachute' - return response - return 'Hello World!' - - This is more useful if a function other than the view function wants to - modify a response. For instance think of a decorator that wants to add - some headers without converting the return value into a response object. - - .. versionadded:: 0.9 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'after_this_request' can only be used when a request" - " context is active, such as in a view function." - ) - - ctx._after_request_functions.append(f) - return f - - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - - -def copy_current_request_context(f: F) -> F: - """A helper function that decorates a function to retain the current - request context. This is useful when working with greenlets. The moment - the function is decorated a copy of the request context is created and - then pushed when the function is called. The current session is also - included in the copied request context. - - Example:: - - import gevent - from flask import copy_current_request_context - - @app.route('/') - def index(): - @copy_current_request_context - def do_some_work(): - # do some work here, it can access flask.request or - # flask.session like you would otherwise in the view function. - ... - gevent.spawn(do_some_work) - return 'Regular response' - - .. versionadded:: 0.10 - """ - ctx = _cv_request.get(None) - - if ctx is None: - raise RuntimeError( - "'copy_current_request_context' can only be used when a" - " request context is active, such as in a view function." 
- ) - - ctx = ctx.copy() - - def wrapper(*args: t.Any, **kwargs: t.Any) -> t.Any: - with ctx: # type: ignore[union-attr] - return ctx.app.ensure_sync(f)(*args, **kwargs) # type: ignore[union-attr] - - return update_wrapper(wrapper, f) # type: ignore[return-value] - - -def has_request_context() -> bool: - """If you have code that wants to test if a request context is there or - not this function can be used. For instance, you may want to take advantage - of request information if the request object is available, but fail - silently if it is unavailable. - - :: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and has_request_context(): - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - Alternatively you can also just test any of the context bound objects - (such as :class:`request` or :class:`g`) for truthness:: - - class User(db.Model): - - def __init__(self, username, remote_addr=None): - self.username = username - if remote_addr is None and request: - remote_addr = request.remote_addr - self.remote_addr = remote_addr - - .. versionadded:: 0.7 - """ - return _cv_request.get(None) is not None - - -def has_app_context() -> bool: - """Works like :func:`has_request_context` but for the application - context. You can also just do a boolean check on the - :data:`current_app` object instead. - - .. versionadded:: 0.9 - """ - return _cv_app.get(None) is not None - - -class AppContext: - """The app context contains application-specific information. An app - context is created and pushed at the beginning of each request if - one is not already active. An app context is also pushed when - running CLI commands. - """ - - def __init__(self, app: Flask) -> None: - self.app = app - self.url_adapter = app.create_url_adapter(None) - self.g: _AppCtxGlobals = app.app_ctx_globals_class() - self._cv_tokens: list[contextvars.Token[AppContext]] = [] - - def push(self) -> None: - """Binds the app context to the current context.""" - self._cv_tokens.append(_cv_app.set(self)) - appcontext_pushed.send(self.app, _async_wrapper=self.app.ensure_sync) - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the app context.""" - try: - if len(self._cv_tokens) == 1: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_appcontext(exc) - finally: - ctx = _cv_app.get() - _cv_app.reset(self._cv_tokens.pop()) - - if ctx is not self: - raise AssertionError( - f"Popped wrong app context. ({ctx!r} instead of {self!r})" - ) - - appcontext_popped.send(self.app, _async_wrapper=self.app.ensure_sync) - - def __enter__(self) -> AppContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - -class RequestContext: - """The request context contains per-request information. The Flask - app creates and pushes it at the beginning of the request, then pops - it at the end of the request. It will create the URL adapter and - request object for the WSGI environment provided. - - Do not attempt to use this class directly, instead use - :meth:`~flask.Flask.test_request_context` and - :meth:`~flask.Flask.request_context` to create this object. - - When the request context is popped, it will evaluate all the - functions registered on the application for teardown execution - (:meth:`~flask.Flask.teardown_request`). 
- - The request context is automatically popped at the end of the - request. When using the interactive debugger, the context will be - restored so ``request`` is still accessible. Similarly, the test - client can preserve the context after the request ends. However, - teardown functions may already have closed some resources such as - database connections. - """ - - def __init__( - self, - app: Flask, - environ: WSGIEnvironment, - request: Request | None = None, - session: SessionMixin | None = None, - ) -> None: - self.app = app - if request is None: - request = app.request_class(environ) - request.json_module = app.json - self.request: Request = request - self.url_adapter = None - try: - self.url_adapter = app.create_url_adapter(self.request) - except HTTPException as e: - self.request.routing_exception = e - self.flashes: list[tuple[str, str]] | None = None - self.session: SessionMixin | None = session - # Functions that should be executed after the request on the response - # object. These will be called before the regular "after_request" - # functions. - self._after_request_functions: list[ft.AfterRequestCallable[t.Any]] = [] - - self._cv_tokens: list[ - tuple[contextvars.Token[RequestContext], AppContext | None] - ] = [] - - def copy(self) -> RequestContext: - """Creates a copy of this request context with the same request object. - This can be used to move a request context to a different greenlet. - Because the actual request object is the same this cannot be used to - move a request context to a different thread unless access to the - request object is locked. - - .. versionadded:: 0.10 - - .. versionchanged:: 1.1 - The current session object is used instead of reloading the original - data. This prevents `flask.session` pointing to an out-of-date object. - """ - return self.__class__( - self.app, - environ=self.request.environ, - request=self.request, - session=self.session, - ) - - def match_request(self) -> None: - """Can be overridden by a subclass to hook into the matching - of the request. - """ - try: - result = self.url_adapter.match(return_rule=True) # type: ignore - self.request.url_rule, self.request.view_args = result # type: ignore - except HTTPException as e: - self.request.routing_exception = e - - def push(self) -> None: - # Before we push the request context we have to ensure that there - # is an application context. - app_ctx = _cv_app.get(None) - - if app_ctx is None or app_ctx.app is not self.app: - app_ctx = self.app.app_context() - app_ctx.push() - else: - app_ctx = None - - self._cv_tokens.append((_cv_request.set(self), app_ctx)) - - # Open the session at the moment that the request context is available. - # This allows a custom open_session method to use the request context. - # Only open a new session if this is the first time the request was - # pushed, otherwise stream_with_context loses the session. - if self.session is None: - session_interface = self.app.session_interface - self.session = session_interface.open_session(self.app, self.request) - - if self.session is None: - self.session = session_interface.make_null_session(self.app) - - # Match the request URL after loading the session, so that the - # session is available in custom URL converters. - if self.url_adapter is not None: - self.match_request() - - def pop(self, exc: BaseException | None = _sentinel) -> None: # type: ignore - """Pops the request context and unbinds it by doing that. 
This will - also trigger the execution of functions registered by the - :meth:`~flask.Flask.teardown_request` decorator. - - .. versionchanged:: 0.9 - Added the `exc` argument. - """ - clear_request = len(self._cv_tokens) == 1 - - try: - if clear_request: - if exc is _sentinel: - exc = sys.exc_info()[1] - self.app.do_teardown_request(exc) - - request_close = getattr(self.request, "close", None) - if request_close is not None: - request_close() - finally: - ctx = _cv_request.get() - token, app_ctx = self._cv_tokens.pop() - _cv_request.reset(token) - - # get rid of circular dependencies at the end of the request - # so that we don't require the GC to be active. - if clear_request: - ctx.request.environ["werkzeug.request"] = None - - if app_ctx is not None: - app_ctx.pop(exc) - - if ctx is not self: - raise AssertionError( - f"Popped wrong request context. ({ctx!r} instead of {self!r})" - ) - - def __enter__(self) -> RequestContext: - self.push() - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.pop(exc_value) - - def __repr__(self) -> str: - return ( - f"<{type(self).__name__} {self.request.url!r}" - f" [{self.request.method}] of {self.app.name}>" - ) diff --git a/myenv/Lib/site-packages/flask/debughelpers.py b/myenv/Lib/site-packages/flask/debughelpers.py deleted file mode 100644 index 2c8c4c4..0000000 --- a/myenv/Lib/site-packages/flask/debughelpers.py +++ /dev/null @@ -1,178 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2.loaders import BaseLoader -from werkzeug.routing import RequestRedirect - -from .blueprints import Blueprint -from .globals import request_ctx -from .sansio.app import App - -if t.TYPE_CHECKING: - from .sansio.scaffold import Scaffold - from .wrappers import Request - - -class UnexpectedUnicodeError(AssertionError, UnicodeError): - """Raised in places where we want some better error reporting for - unexpected unicode or binary data. - """ - - -class DebugFilesKeyError(KeyError, AssertionError): - """Raised from request.files during debugging. The idea is that it can - provide a better error message than just a generic KeyError/BadRequest. - """ - - def __init__(self, request: Request, key: str) -> None: - form_matches = request.form.getlist(key) - buf = [ - f"You tried to access the file {key!r} in the request.files" - " dictionary but it does not exist. The mimetype for the" - f" request is {request.mimetype!r} instead of" - " 'multipart/form-data' which means that no file contents" - " were transmitted. To fix this error you should provide" - ' enctype="multipart/form-data" in your form.' - ] - if form_matches: - names = ", ".join(repr(x) for x in form_matches) - buf.append( - "\n\nThe browser instead transmitted some file names. " - f"This was submitted: {names}" - ) - self.msg = "".join(buf) - - def __str__(self) -> str: - return self.msg - - -class FormDataRoutingRedirect(AssertionError): - """This exception is raised in debug mode if a routing redirect - would cause the browser to drop the method or body. This happens - when method is not GET, HEAD or OPTIONS and the status code is not - 307 or 308. - """ - - def __init__(self, request: Request) -> None: - exc = request.routing_exception - assert isinstance(exc, RequestRedirect) - buf = [ - f"A request was sent to '{request.url}', but routing issued" - f" a redirect to the canonical URL '{exc.new_url}'." 
- ] - - if f"{request.base_url}/" == exc.new_url.partition("?")[0]: - buf.append( - " The URL was defined with a trailing slash. Flask" - " will redirect to the URL with a trailing slash if it" - " was accessed without one." - ) - - buf.append( - " Send requests to the canonical URL, or use 307 or 308 for" - " routing redirects. Otherwise, browsers will drop form" - " data.\n\n" - "This exception is only raised in debug mode." - ) - super().__init__("".join(buf)) - - -def attach_enctype_error_multidict(request: Request) -> None: - """Patch ``request.files.__getitem__`` to raise a descriptive error - about ``enctype=multipart/form-data``. - - :param request: The request to patch. - :meta private: - """ - oldcls = request.files.__class__ - - class newcls(oldcls): # type: ignore[valid-type, misc] - def __getitem__(self, key: str) -> t.Any: - try: - return super().__getitem__(key) - except KeyError as e: - if key not in request.form: - raise - - raise DebugFilesKeyError(request, key).with_traceback( - e.__traceback__ - ) from None - - newcls.__name__ = oldcls.__name__ - newcls.__module__ = oldcls.__module__ - request.files.__class__ = newcls - - -def _dump_loader_info(loader: BaseLoader) -> t.Iterator[str]: - yield f"class: {type(loader).__module__}.{type(loader).__name__}" - for key, value in sorted(loader.__dict__.items()): - if key.startswith("_"): - continue - if isinstance(value, (tuple, list)): - if not all(isinstance(x, str) for x in value): - continue - yield f"{key}:" - for item in value: - yield f" - {item}" - continue - elif not isinstance(value, (str, int, float, bool)): - continue - yield f"{key}: {value!r}" - - -def explain_template_loading_attempts( - app: App, - template: str, - attempts: list[ - tuple[ - BaseLoader, - Scaffold, - tuple[str, str | None, t.Callable[[], bool] | None] | None, - ] - ], -) -> None: - """This should help developers understand what failed""" - info = [f"Locating template {template!r}:"] - total_found = 0 - blueprint = None - if request_ctx and request_ctx.request.blueprint is not None: - blueprint = request_ctx.request.blueprint - - for idx, (loader, srcobj, triple) in enumerate(attempts): - if isinstance(srcobj, App): - src_info = f"application {srcobj.import_name!r}" - elif isinstance(srcobj, Blueprint): - src_info = f"blueprint {srcobj.name!r} ({srcobj.import_name})" - else: - src_info = repr(srcobj) - - info.append(f"{idx + 1:5}: trying loader of {src_info}") - - for line in _dump_loader_info(loader): - info.append(f" {line}") - - if triple is None: - detail = "no match" - else: - detail = f"found ({triple[1] or ''!r})" - total_found += 1 - info.append(f" -> {detail}") - - seems_fishy = False - if total_found == 0: - info.append("Error: the template could not be found.") - seems_fishy = True - elif total_found > 1: - info.append("Warning: multiple loaders returned a match for the template.") - seems_fishy = True - - if blueprint is not None and seems_fishy: - info.append( - " The template was looked up from an endpoint that belongs" - f" to the blueprint {blueprint!r}." 
- ) - info.append(" Maybe you did not place a template in the right folder?") - info.append(" See https://flask.palletsprojects.com/blueprints/#templates") - - app.logger.info("\n".join(info)) diff --git a/myenv/Lib/site-packages/flask/globals.py b/myenv/Lib/site-packages/flask/globals.py deleted file mode 100644 index e2c410c..0000000 --- a/myenv/Lib/site-packages/flask/globals.py +++ /dev/null @@ -1,51 +0,0 @@ -from __future__ import annotations - -import typing as t -from contextvars import ContextVar - -from werkzeug.local import LocalProxy - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .ctx import _AppCtxGlobals - from .ctx import AppContext - from .ctx import RequestContext - from .sessions import SessionMixin - from .wrappers import Request - - -_no_app_msg = """\ -Working outside of application context. - -This typically means that you attempted to use functionality that needed -the current application. To solve this, set up an application context -with app.app_context(). See the documentation for more information.\ -""" -_cv_app: ContextVar[AppContext] = ContextVar("flask.app_ctx") -app_ctx: AppContext = LocalProxy( # type: ignore[assignment] - _cv_app, unbound_message=_no_app_msg -) -current_app: Flask = LocalProxy( # type: ignore[assignment] - _cv_app, "app", unbound_message=_no_app_msg -) -g: _AppCtxGlobals = LocalProxy( # type: ignore[assignment] - _cv_app, "g", unbound_message=_no_app_msg -) - -_no_req_msg = """\ -Working outside of request context. - -This typically means that you attempted to use functionality that needed -an active HTTP request. Consult the documentation on testing for -information about how to avoid this problem.\ -""" -_cv_request: ContextVar[RequestContext] = ContextVar("flask.request_ctx") -request_ctx: RequestContext = LocalProxy( # type: ignore[assignment] - _cv_request, unbound_message=_no_req_msg -) -request: Request = LocalProxy( # type: ignore[assignment] - _cv_request, "request", unbound_message=_no_req_msg -) -session: SessionMixin = LocalProxy( # type: ignore[assignment] - _cv_request, "session", unbound_message=_no_req_msg -) diff --git a/myenv/Lib/site-packages/flask/helpers.py b/myenv/Lib/site-packages/flask/helpers.py deleted file mode 100644 index 359a842..0000000 --- a/myenv/Lib/site-packages/flask/helpers.py +++ /dev/null @@ -1,621 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import sys -import typing as t -from datetime import datetime -from functools import lru_cache -from functools import update_wrapper - -import werkzeug.utils -from werkzeug.exceptions import abort as _wz_abort -from werkzeug.utils import redirect as _wz_redirect -from werkzeug.wrappers import Response as BaseResponse - -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .globals import request_ctx -from .globals import session -from .signals import message_flashed - -if t.TYPE_CHECKING: # pragma: no cover - from .wrappers import Response - - -def get_debug_flag() -> bool: - """Get whether debug mode should be enabled for the app, indicated by the - :envvar:`FLASK_DEBUG` environment variable. The default is ``False``. - """ - val = os.environ.get("FLASK_DEBUG") - return bool(val and val.lower() not in {"0", "false", "no"}) - - -def get_load_dotenv(default: bool = True) -> bool: - """Get whether the user has disabled loading default dotenv files by - setting :envvar:`FLASK_SKIP_DOTENV`. The default is ``True``, load - the files. 
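# A small sketch of the context-local proxies defined in globals.py
# above; outside a context they raise the "Working outside of ..."
# RuntimeError carried by _no_app_msg / _no_req_msg.
from flask import Flask, current_app, g

app = Flask(__name__)

with app.app_context():
    g.db_handle = "fake-connection"      # stored on the app context
    assert current_app.name == app.name  # proxy resolves to ``app``
# after the block, ``current_app`` and ``g`` are unbound again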
- - :param default: What to return if the env var isn't set. - """ - val = os.environ.get("FLASK_SKIP_DOTENV") - - if not val: - return default - - return val.lower() in ("0", "false", "no") - - -def stream_with_context( - generator_or_function: t.Iterator[t.AnyStr] | t.Callable[..., t.Iterator[t.AnyStr]], -) -> t.Iterator[t.AnyStr]: - """Request contexts disappear when the response is started on the server. - This is done for efficiency reasons and to make it less likely to encounter - memory leaks with badly written WSGI middlewares. The downside is that if - you are using streamed responses, the generator cannot access request bound - information any more. - - This function however can help you keep the context around for longer:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - @stream_with_context - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(generate()) - - Alternatively it can also be used around a specific generator:: - - from flask import stream_with_context, request, Response - - @app.route('/stream') - def streamed_response(): - def generate(): - yield 'Hello ' - yield request.args['name'] - yield '!' - return Response(stream_with_context(generate())) - - .. versionadded:: 0.9 - """ - try: - gen = iter(generator_or_function) # type: ignore[arg-type] - except TypeError: - - def decorator(*args: t.Any, **kwargs: t.Any) -> t.Any: - gen = generator_or_function(*args, **kwargs) # type: ignore[operator] - return stream_with_context(gen) - - return update_wrapper(decorator, generator_or_function) # type: ignore[arg-type] - - def generator() -> t.Iterator[t.AnyStr | None]: - ctx = _cv_request.get(None) - if ctx is None: - raise RuntimeError( - "'stream_with_context' can only be used when a request" - " context is active, such as in a view function." - ) - with ctx: - # Dummy sentinel. Has to be inside the context block or we're - # not actually keeping the context around. - yield None - - # The try/finally is here so that if someone passes a WSGI level - # iterator in we're still running the cleanup logic. Generators - # don't need that because they are closed on their destruction - # automatically. - try: - yield from gen - finally: - if hasattr(gen, "close"): - gen.close() - - # The trick is to start the generator. Then the code execution runs until - # the first dummy None is yielded at which point the context was already - # pushed. This item is discarded. Then when the iteration continues the - # real generator is executed. - wrapped_g = generator() - next(wrapped_g) - return wrapped_g # type: ignore[return-value] - - -def make_response(*args: t.Any) -> Response: - """Sometimes it is necessary to set additional headers in a view. Because - views do not have to return response objects but can return a value that - is converted into a response object by Flask itself, it becomes tricky to - add headers to it. This function can be called instead of using a return - and you will get a response object which you can use to attach headers. - - If view looked like this and you want to add a new header:: - - def index(): - return render_template('index.html', foo=42) - - You can now do something like this:: - - def index(): - response = make_response(render_template('index.html', foo=42)) - response.headers['X-Parachutes'] = 'parachutes are cool' - return response - - This function accepts the very same arguments you can return from a - view function. 
This for example creates a response with a 404 error - code:: - - response = make_response(render_template('not_found.html'), 404) - - The other use case of this function is to force the return value of a - view function into a response which is helpful with view - decorators:: - - response = make_response(view_function()) - response.headers['X-Parachutes'] = 'parachutes are cool' - - Internally this function does the following things: - - - if no arguments are passed, it creates a new response argument - - if one argument is passed, :meth:`flask.Flask.make_response` - is invoked with it. - - if more than one argument is passed, the arguments are passed - to the :meth:`flask.Flask.make_response` function as tuple. - - .. versionadded:: 0.6 - """ - if not args: - return current_app.response_class() - if len(args) == 1: - args = args[0] - return current_app.make_response(args) - - -def url_for( - endpoint: str, - *, - _anchor: str | None = None, - _method: str | None = None, - _scheme: str | None = None, - _external: bool | None = None, - **values: t.Any, -) -> str: - """Generate a URL to the given endpoint with the given values. - - This requires an active request or application context, and calls - :meth:`current_app.url_for() `. See that method - for full documentation. - - :param endpoint: The endpoint name associated with the URL to - generate. If this starts with a ``.``, the current blueprint - name (if any) will be used. - :param _anchor: If given, append this as ``#anchor`` to the URL. - :param _method: If given, generate the URL associated with this - method for the endpoint. - :param _scheme: If given, the URL will have this scheme if it is - external. - :param _external: If given, prefer the URL to be internal (False) or - require it to be external (True). External URLs include the - scheme and domain. When not in an active request, URLs are - external by default. - :param values: Values to use for the variable parts of the URL rule. - Unknown keys are appended as query string arguments, like - ``?a=b&c=d``. - - .. versionchanged:: 2.2 - Calls ``current_app.url_for``, allowing an app to override the - behavior. - - .. versionchanged:: 0.10 - The ``_scheme`` parameter was added. - - .. versionchanged:: 0.9 - The ``_anchor`` and ``_method`` parameters were added. - - .. versionchanged:: 0.9 - Calls ``app.handle_url_build_error`` on build errors. - """ - return current_app.url_for( - endpoint, - _anchor=_anchor, - _method=_method, - _scheme=_scheme, - _external=_external, - **values, - ) - - -def redirect( - location: str, code: int = 302, Response: type[BaseResponse] | None = None -) -> BaseResponse: - """Create a redirect response object. - - If :data:`~flask.current_app` is available, it will use its - :meth:`~flask.Flask.redirect` method, otherwise it will use - :func:`werkzeug.utils.redirect`. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - :param Response: The response class to use. Not used when - ``current_app`` is active, which uses ``app.response_class``. - - .. versionadded:: 2.2 - Calls ``current_app.redirect`` if available instead of always - using Werkzeug's default ``redirect``. - """ - if current_app: - return current_app.redirect(location, code=code) - - return _wz_redirect(location, code=code, Response=Response) - - -def abort(code: int | BaseResponse, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - """Raise an :exc:`~werkzeug.exceptions.HTTPException` for the given - status code. 
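# Hedged usage sketch for url_for() and redirect() as documented above;
# the endpoint and route names are illustrative.
from flask import Flask, redirect, url_for

app = Flask(__name__)

@app.route("/user/<name>")
def profile(name):
    return f"Hello {name}"

@app.route("/old-profile/<name>")
def old_profile(name):
    # unknown kwargs become query args: /user/alice?ref=legacy
    return redirect(url_for("profile", name=name, ref="legacy"), code=301)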
- - If :data:`~flask.current_app` is available, it will call its - :attr:`~flask.Flask.aborter` object, otherwise it will use - :func:`werkzeug.exceptions.abort`. - - :param code: The status code for the exception, which must be - registered in ``app.aborter``. - :param args: Passed to the exception. - :param kwargs: Passed to the exception. - - .. versionadded:: 2.2 - Calls ``current_app.aborter`` if available instead of always - using Werkzeug's default ``abort``. - """ - if current_app: - current_app.aborter(code, *args, **kwargs) - - _wz_abort(code, *args, **kwargs) - - -def get_template_attribute(template_name: str, attribute: str) -> t.Any: - """Loads a macro (or variable) a template exports. This can be used to - invoke a macro from within Python code. If you for example have a - template named :file:`_cider.html` with the following contents: - - .. sourcecode:: html+jinja - - {% macro hello(name) %}Hello {{ name }}!{% endmacro %} - - You can access this from Python code like this:: - - hello = get_template_attribute('_cider.html', 'hello') - return hello('World') - - .. versionadded:: 0.2 - - :param template_name: the name of the template - :param attribute: the name of the variable of macro to access - """ - return getattr(current_app.jinja_env.get_template(template_name).module, attribute) - - -def flash(message: str, category: str = "message") -> None: - """Flashes a message to the next request. In order to remove the - flashed message from the session and to display it to the user, - the template has to call :func:`get_flashed_messages`. - - .. versionchanged:: 0.3 - `category` parameter added. - - :param message: the message to be flashed. - :param category: the category for the message. The following values - are recommended: ``'message'`` for any kind of message, - ``'error'`` for errors, ``'info'`` for information - messages and ``'warning'`` for warnings. However any - kind of string can be used as category. - """ - # Original implementation: - # - # session.setdefault('_flashes', []).append((category, message)) - # - # This assumed that changes made to mutable structures in the session are - # always in sync with the session object, which is not true for session - # implementations that use external storage for keeping their keys/values. - flashes = session.get("_flashes", []) - flashes.append((category, message)) - session["_flashes"] = flashes - app = current_app._get_current_object() # type: ignore - message_flashed.send( - app, - _async_wrapper=app.ensure_sync, - message=message, - category=category, - ) - - -def get_flashed_messages( - with_categories: bool = False, category_filter: t.Iterable[str] = () -) -> list[str] | list[tuple[str, str]]: - """Pulls all flashed messages from the session and returns them. - Further calls in the same request to the function will return - the same messages. By default just the messages are returned, - but when `with_categories` is set to ``True``, the return value will - be a list of tuples in the form ``(category, message)`` instead. - - Filter the flashed messages to one or more categories by providing those - categories in `category_filter`. This allows rendering categories in - separate html blocks. The `with_categories` and `category_filter` - arguments are distinct: - - * `with_categories` controls whether categories are returned with message - text (``True`` gives a tuple, where ``False`` gives just the message text). - * `category_filter` filters the messages down to only those matching the - provided categories. 
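# Illustrative flash()/get_flashed_messages() pairing per the docstrings
# above; the SECRET_KEY value is a placeholder (flashed messages live in
# the session, so a key is required).
from flask import Flask, flash, get_flashed_messages

app = Flask(__name__)
app.secret_key = "change-me"

@app.route("/save")
def save():
    flash("Saved successfully.", "info")
    # a template would normally call get_flashed_messages(); inlined here
    return {"messages": get_flashed_messages(with_categories=True)}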
- - See :doc:`/patterns/flashing` for examples. - - .. versionchanged:: 0.3 - `with_categories` parameter added. - - .. versionchanged:: 0.9 - `category_filter` parameter added. - - :param with_categories: set to ``True`` to also receive categories. - :param category_filter: filter of categories to limit return values. Only - categories in the list will be returned. - """ - flashes = request_ctx.flashes - if flashes is None: - flashes = session.pop("_flashes") if "_flashes" in session else [] - request_ctx.flashes = flashes - if category_filter: - flashes = list(filter(lambda f: f[0] in category_filter, flashes)) - if not with_categories: - return [x[1] for x in flashes] - return flashes - - -def _prepare_send_file_kwargs(**kwargs: t.Any) -> dict[str, t.Any]: - if kwargs.get("max_age") is None: - kwargs["max_age"] = current_app.get_send_file_max_age - - kwargs.update( - environ=request.environ, - use_x_sendfile=current_app.config["USE_X_SENDFILE"], - response_class=current_app.response_class, - _root_path=current_app.root_path, # type: ignore - ) - return kwargs - - -def send_file( - path_or_file: os.PathLike[t.AnyStr] | str | t.BinaryIO, - mimetype: str | None = None, - as_attachment: bool = False, - download_name: str | None = None, - conditional: bool = True, - etag: bool | str = True, - last_modified: datetime | int | float | None = None, - max_age: None | (int | t.Callable[[str | None], int | None]) = None, -) -> Response: - """Send the contents of a file to the client. - - The first argument can be a file path or a file-like object. Paths - are preferred in most cases because Werkzeug can manage the file and - get extra information from the path. Passing a file-like object - requires that the file is opened in binary mode, and is mostly - useful when building a file in memory with :class:`io.BytesIO`. - - Never pass file paths provided by a user. The path is assumed to be - trusted, so a user could craft a path to access a file you didn't - intend. Use :func:`send_from_directory` to safely serve - user-requested paths from within a directory. - - If the WSGI server sets a ``file_wrapper`` in ``environ``, it is - used, otherwise Werkzeug's built-in wrapper is used. Alternatively, - if the HTTP server supports ``X-Sendfile``, configuring Flask with - ``USE_X_SENDFILE = True`` will tell the server to send the given - path, which is much more efficient than reading it in Python. - - :param path_or_file: The path to the file to send, relative to the - current working directory if a relative path is given. - Alternatively, a file-like object opened in binary mode. Make - sure the file pointer is seeked to the start of the data. - :param mimetype: The MIME type to send for the file. If not - provided, it will try to detect it from the file name. - :param as_attachment: Indicate to a browser that it should offer to - save the file instead of displaying it. - :param download_name: The default name browsers will use when saving - the file. Defaults to the passed file name. - :param conditional: Enable conditional and range responses based on - request headers. Requires passing a file path and ``environ``. - :param etag: Calculate an ETag for the file, which requires passing - a file path. Can also be a string to use instead. - :param last_modified: The last modified time to send for the file, - in seconds. If not provided, it will try to detect it from the - file path. - :param max_age: How long the client should cache the file, in - seconds. 
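# Sketch of send_file() with an in-memory file, per the docstring above;
# passing a real path instead would enable conditional/etag handling.
import io

from flask import Flask, send_file

app = Flask(__name__)

@app.route("/report")
def report():
    buf = io.BytesIO(b"col1,col2\n1,2\n")
    return send_file(
        buf,
        mimetype="text/csv",
        as_attachment=True,
        download_name="report.csv",
    )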
If set, ``Cache-Control`` will be ``public``, otherwise - it will be ``no-cache`` to prefer conditional caching. - - .. versionchanged:: 2.0 - ``download_name`` replaces the ``attachment_filename`` - parameter. If ``as_attachment=False``, it is passed with - ``Content-Disposition: inline`` instead. - - .. versionchanged:: 2.0 - ``max_age`` replaces the ``cache_timeout`` parameter. - ``conditional`` is enabled and ``max_age`` is not set by - default. - - .. versionchanged:: 2.0 - ``etag`` replaces the ``add_etags`` parameter. It can be a - string to use instead of generating one. - - .. versionchanged:: 2.0 - Passing a file-like object that inherits from - :class:`~io.TextIOBase` will raise a :exc:`ValueError` rather - than sending an empty file. - - .. versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionchanged:: 1.1 - ``filename`` may be a :class:`~os.PathLike` object. - - .. versionchanged:: 1.1 - Passing a :class:`~io.BytesIO` object supports range requests. - - .. versionchanged:: 1.0.3 - Filenames are encoded with ASCII instead of Latin-1 for broader - compatibility with WSGI servers. - - .. versionchanged:: 1.0 - UTF-8 filenames as specified in :rfc:`2231` are supported. - - .. versionchanged:: 0.12 - The filename is no longer automatically inferred from file - objects. If you want to use automatic MIME and etag support, - pass a filename via ``filename_or_fp`` or - ``attachment_filename``. - - .. versionchanged:: 0.12 - ``attachment_filename`` is preferred over ``filename`` for MIME - detection. - - .. versionchanged:: 0.9 - ``cache_timeout`` defaults to - :meth:`Flask.get_send_file_max_age`. - - .. versionchanged:: 0.7 - MIME guessing and etag support for file-like objects was - removed because it was unreliable. Pass a filename if you are - able to, otherwise attach an etag yourself. - - .. versionchanged:: 0.5 - The ``add_etags``, ``cache_timeout`` and ``conditional`` - parameters were added. The default behavior is to add etags. - - .. versionadded:: 0.2 - """ - return werkzeug.utils.send_file( # type: ignore[return-value] - **_prepare_send_file_kwargs( - path_or_file=path_or_file, - environ=request.environ, - mimetype=mimetype, - as_attachment=as_attachment, - download_name=download_name, - conditional=conditional, - etag=etag, - last_modified=last_modified, - max_age=max_age, - ) - ) - - -def send_from_directory( - directory: os.PathLike[str] | str, - path: os.PathLike[str] | str, - **kwargs: t.Any, -) -> Response: - """Send a file from within a directory using :func:`send_file`. - - .. code-block:: python - - @app.route("/uploads/") - def download_file(name): - return send_from_directory( - app.config['UPLOAD_FOLDER'], name, as_attachment=True - ) - - This is a secure way to serve files from a folder, such as static - files or uploads. Uses :func:`~werkzeug.security.safe_join` to - ensure the path coming from the client is not maliciously crafted to - point outside the specified directory. - - If the final path does not point to an existing regular file, - raises a 404 :exc:`~werkzeug.exceptions.NotFound` error. - - :param directory: The directory that ``path`` must be located under, - relative to the current application's root path. - :param path: The path to the file to send, relative to - ``directory``. - :param kwargs: Arguments to pass to :func:`send_file`. - - .. versionchanged:: 2.0 - ``path`` replaces the ``filename`` parameter. - - .. 
versionadded:: 2.0 - Moved the implementation to Werkzeug. This is now a wrapper to - pass some Flask-specific arguments. - - .. versionadded:: 0.5 - """ - return werkzeug.utils.send_from_directory( # type: ignore[return-value] - directory, path, **_prepare_send_file_kwargs(**kwargs) - ) - - -def get_root_path(import_name: str) -> str: - """Find the root path of a package, or the path that contains a - module. If it cannot be found, returns the current working - directory. - - Not to be confused with the value returned by :func:`find_package`. - - :meta private: - """ - # Module already imported and has a file attribute. Use that first. - mod = sys.modules.get(import_name) - - if mod is not None and hasattr(mod, "__file__") and mod.__file__ is not None: - return os.path.dirname(os.path.abspath(mod.__file__)) - - # Next attempt: check the loader. - try: - spec = importlib.util.find_spec(import_name) - - if spec is None: - raise ValueError - except (ImportError, ValueError): - loader = None - else: - loader = spec.loader - - # Loader does not exist or we're referring to an unloaded main - # module or a main module without path (interactive sessions), go - # with the current working directory. - if loader is None: - return os.getcwd() - - if hasattr(loader, "get_filename"): - filepath = loader.get_filename(import_name) - else: - # Fall back to imports. - __import__(import_name) - mod = sys.modules[import_name] - filepath = getattr(mod, "__file__", None) - - # If we don't have a file path it might be because it is a - # namespace package. In this case pick the root path from the - # first module that is contained in the package. - if filepath is None: - raise RuntimeError( - "No root path can be found for the provided module" - f" {import_name!r}. This can happen because the module" - " came from an import hook that does not provide file" - " name information or because it's a namespace package." - " In this case the root path needs to be explicitly" - " provided." - ) - - # filepath is import_name.py for a module, or __init__.py for a package. - return os.path.dirname(os.path.abspath(filepath)) # type: ignore[no-any-return] - - -@lru_cache(maxsize=None) -def _split_blueprint_path(name: str) -> list[str]: - out: list[str] = [name] - - if "." in name: - out.extend(_split_blueprint_path(name.rpartition(".")[0])) - - return out diff --git a/myenv/Lib/site-packages/flask/json/__init__.py b/myenv/Lib/site-packages/flask/json/__init__.py deleted file mode 100644 index c0941d0..0000000 --- a/myenv/Lib/site-packages/flask/json/__init__.py +++ /dev/null @@ -1,170 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - -from ..globals import current_app -from .provider import _default - -if t.TYPE_CHECKING: # pragma: no cover - from ..wrappers import Response - - -def dumps(obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dumps() ` - method, otherwise it will use :func:`json.dumps`. - - :param obj: The data to serialize. - :param kwargs: Arguments passed to the ``dumps`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dumps``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. - - .. 
versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.dumps(obj, **kwargs) - - kwargs.setdefault("default", _default) - return _json.dumps(obj, **kwargs) - - -def dump(obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.dump() ` - method, otherwise it will use :func:`json.dump`. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: Arguments passed to the ``dump`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.dump``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - Writing to a binary file, and the ``encoding`` argument, will be - removed in Flask 2.1. - """ - if current_app: - current_app.json.dump(obj, fp, **kwargs) - else: - kwargs.setdefault("default", _default) - _json.dump(obj, fp, **kwargs) - - -def loads(s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.loads() ` - method, otherwise it will use :func:`json.loads`. - - :param s: Text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``loads`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.loads``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The data must be a - string or UTF-8 bytes. - - .. versionchanged:: 1.0.3 - ``app`` can be passed directly, rather than requiring an app - context for configuration. - """ - if current_app: - return current_app.json.loads(s, **kwargs) - - return _json.loads(s, **kwargs) - - -def load(fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - If :data:`~flask.current_app` is available, it will use its - :meth:`app.json.load() ` - method, otherwise it will use :func:`json.load`. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: Arguments passed to the ``load`` implementation. - - .. versionchanged:: 2.3 - The ``app`` parameter was removed. - - .. versionchanged:: 2.2 - Calls ``current_app.json.load``, allowing an app to override - the behavior. - - .. versionchanged:: 2.2 - The ``app`` parameter will be removed in Flask 2.3. - - .. versionchanged:: 2.0 - ``encoding`` will be removed in Flask 2.1. The file must be text - mode, or binary mode with UTF-8 bytes. - """ - if current_app: - return current_app.json.load(fp, **kwargs) - - return _json.load(fp, **kwargs) - - -def jsonify(*args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. A dict or list returned from a view will be converted to a - JSON response automatically without needing to call this. - - This requires an active request or application context, and calls - :meth:`app.json.response() `. - - In debug mode, the output is formatted with indentation to make it - easier to read. This may also be controlled by the provider. - - Either positional or keyword arguments can be given, not both. 
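# Minimal sketch of flask.json.dumps()/loads(): with no app context they
# fall back to the stdlib json module plus the _default hook, so dates
# serialize as RFC 822 strings.
import datetime

import flask.json

s = flask.json.dumps({"when": datetime.date(2024, 1, 1)})
# -> '{"when": "Mon, 01 Jan 2024 00:00:00 GMT"}'
assert flask.json.loads(s)["when"].endswith("GMT")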
- If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - - .. versionchanged:: 2.2 - Calls ``current_app.json.response``, allowing an app to override - the behavior. - - .. versionchanged:: 2.0.2 - :class:`decimal.Decimal` is supported by converting to a string. - - .. versionchanged:: 0.11 - Added support for serializing top-level arrays. This was a - security risk in ancient browsers. See :ref:`security-json`. - - .. versionadded:: 0.2 - """ - return current_app.json.response(*args, **kwargs) # type: ignore[return-value] diff --git a/myenv/Lib/site-packages/flask/json/provider.py b/myenv/Lib/site-packages/flask/json/provider.py deleted file mode 100644 index f9b2e8f..0000000 --- a/myenv/Lib/site-packages/flask/json/provider.py +++ /dev/null @@ -1,215 +0,0 @@ -from __future__ import annotations - -import dataclasses -import decimal -import json -import typing as t -import uuid -import weakref -from datetime import date - -from werkzeug.http import http_date - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.sansio.response import Response - - from ..sansio.app import App - - -class JSONProvider: - """A standard set of JSON operations for an application. Subclasses - of this can be used to customize JSON behavior or use different - JSON libraries. - - To implement a provider for a specific library, subclass this base - class and implement at least :meth:`dumps` and :meth:`loads`. All - other methods have default implementations. - - To use a different provider, either subclass ``Flask`` and set - :attr:`~flask.Flask.json_provider_class` to a provider class, or set - :attr:`app.json ` to an instance of the class. - - :param app: An application instance. This will be stored as a - :class:`weakref.proxy` on the :attr:`_app` attribute. - - .. versionadded:: 2.2 - """ - - def __init__(self, app: App) -> None: - self._app: App = weakref.proxy(app) - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON. - - :param obj: The data to serialize. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def dump(self, obj: t.Any, fp: t.IO[str], **kwargs: t.Any) -> None: - """Serialize data as JSON and write to a file. - - :param obj: The data to serialize. - :param fp: A file opened for writing text. Should use the UTF-8 - encoding to be valid JSON. - :param kwargs: May be passed to the underlying JSON library. - """ - fp.write(self.dumps(obj, **kwargs)) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON. - - :param s: Text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. - """ - raise NotImplementedError - - def load(self, fp: t.IO[t.AnyStr], **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON read from a file. - - :param fp: A file opened for reading text or UTF-8 bytes. - :param kwargs: May be passed to the underlying JSON library. 
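# Assumed sketch of a custom JSONProvider as described above; ``orjson``
# is only an illustrative third-party library choice.
import orjson

from flask import Flask
from flask.json.provider import JSONProvider

class OrjsonProvider(JSONProvider):
    def dumps(self, obj, **kwargs):
        return orjson.dumps(obj).decode("utf-8")

    def loads(self, s, **kwargs):
        return orjson.loads(s)

class MyFlask(Flask):
    json_provider_class = OrjsonProvider

app = MyFlask(__name__)  # app.json is now an OrjsonProvider instance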
- """ - return self.loads(fp.read(), **kwargs) - - def _prepare_response_obj( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> t.Any: - if args and kwargs: - raise TypeError("app.json.response() takes either args or kwargs, not both") - - if not args and not kwargs: - return None - - if len(args) == 1: - return args[0] - - return args or kwargs - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with the ``application/json`` - mimetype. - - The :func:`~flask.json.jsonify` function calls this method for - the current application. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - return self._app.response_class(self.dumps(obj), mimetype="application/json") - - -def _default(o: t.Any) -> t.Any: - if isinstance(o, date): - return http_date(o) - - if isinstance(o, (decimal.Decimal, uuid.UUID)): - return str(o) - - if dataclasses and dataclasses.is_dataclass(o): - return dataclasses.asdict(o) - - if hasattr(o, "__html__"): - return str(o.__html__()) - - raise TypeError(f"Object of type {type(o).__name__} is not JSON serializable") - - -class DefaultJSONProvider(JSONProvider): - """Provide JSON operations using Python's built-in :mod:`json` - library. Serializes the following additional data types: - - - :class:`datetime.datetime` and :class:`datetime.date` are - serialized to :rfc:`822` strings. This is the same as the HTTP - date format. - - :class:`uuid.UUID` is serialized to a string. - - :class:`dataclasses.dataclass` is passed to - :func:`dataclasses.asdict`. - - :class:`~markupsafe.Markup` (or any object with a ``__html__`` - method) will call the ``__html__`` method to get a string. - """ - - default: t.Callable[[t.Any], t.Any] = staticmethod(_default) # type: ignore[assignment] - """Apply this function to any object that :meth:`json.dumps` does - not know how to serialize. It should return a valid JSON type or - raise a ``TypeError``. - """ - - ensure_ascii = True - """Replace non-ASCII characters with escape sequences. This may be - more compatible with some clients, but can be disabled for better - performance and size. - """ - - sort_keys = True - """Sort the keys in any serialized dicts. This may be useful for - some caching situations, but can be disabled for better performance. - When enabled, keys must all be strings, they are not converted - before sorting. - """ - - compact: bool | None = None - """If ``True``, or ``None`` out of debug mode, the :meth:`response` - output will not add indentation, newlines, or spaces. If ``False``, - or ``None`` in debug mode, it will use a non-compact representation. - """ - - mimetype = "application/json" - """The mimetype set in :meth:`response`.""" - - def dumps(self, obj: t.Any, **kwargs: t.Any) -> str: - """Serialize data as JSON to a string. - - Keyword arguments are passed to :func:`json.dumps`. Sets some - parameter defaults from the :attr:`default`, - :attr:`ensure_ascii`, and :attr:`sort_keys` attributes. - - :param obj: The data to serialize. - :param kwargs: Passed to :func:`json.dumps`. 
- """ - kwargs.setdefault("default", self.default) - kwargs.setdefault("ensure_ascii", self.ensure_ascii) - kwargs.setdefault("sort_keys", self.sort_keys) - return json.dumps(obj, **kwargs) - - def loads(self, s: str | bytes, **kwargs: t.Any) -> t.Any: - """Deserialize data as JSON from a string or bytes. - - :param s: Text or UTF-8 bytes. - :param kwargs: Passed to :func:`json.loads`. - """ - return json.loads(s, **kwargs) - - def response(self, *args: t.Any, **kwargs: t.Any) -> Response: - """Serialize the given arguments as JSON, and return a - :class:`~flask.Response` object with it. The response mimetype - will be "application/json" and can be changed with - :attr:`mimetype`. - - If :attr:`compact` is ``False`` or debug mode is enabled, the - output will be formatted to be easier to read. - - Either positional or keyword arguments can be given, not both. - If no arguments are given, ``None`` is serialized. - - :param args: A single value to serialize, or multiple values to - treat as a list to serialize. - :param kwargs: Treat as a dict to serialize. - """ - obj = self._prepare_response_obj(args, kwargs) - dump_args: dict[str, t.Any] = {} - - if (self.compact is None and self._app.debug) or self.compact is False: - dump_args.setdefault("indent", 2) - else: - dump_args.setdefault("separators", (",", ":")) - - return self._app.response_class( - f"{self.dumps(obj, **dump_args)}\n", mimetype=self.mimetype - ) diff --git a/myenv/Lib/site-packages/flask/json/tag.py b/myenv/Lib/site-packages/flask/json/tag.py deleted file mode 100644 index 8dc3629..0000000 --- a/myenv/Lib/site-packages/flask/json/tag.py +++ /dev/null @@ -1,327 +0,0 @@ -""" -Tagged JSON -~~~~~~~~~~~ - -A compact representation for lossless serialization of non-standard JSON -types. :class:`~flask.sessions.SecureCookieSessionInterface` uses this -to serialize the session data, but it may be useful in other places. It -can be extended to support other types. - -.. autoclass:: TaggedJSONSerializer - :members: - -.. autoclass:: JSONTag - :members: - -Let's see an example that adds support for -:class:`~collections.OrderedDict`. Dicts don't have an order in JSON, so -to handle this we will dump the items as a list of ``[key, value]`` -pairs. Subclass :class:`JSONTag` and give it the new key ``' od'`` to -identify the type. The session serializer processes dicts first, so -insert the new tag at the front of the order since ``OrderedDict`` must -be processed before ``dict``. - -.. code-block:: python - - from flask.json.tag import JSONTag - - class TagOrderedDict(JSONTag): - __slots__ = ('serializer',) - key = ' od' - - def check(self, value): - return isinstance(value, OrderedDict) - - def to_json(self, value): - return [[k, self.serializer.tag(v)] for k, v in iteritems(value)] - - def to_python(self, value): - return OrderedDict(value) - - app.session_interface.serializer.register(TagOrderedDict, index=0) -""" - -from __future__ import annotations - -import typing as t -from base64 import b64decode -from base64 import b64encode -from datetime import datetime -from uuid import UUID - -from markupsafe import Markup -from werkzeug.http import http_date -from werkzeug.http import parse_date - -from ..json import dumps -from ..json import loads - - -class JSONTag: - """Base class for defining type tags for :class:`TaggedJSONSerializer`.""" - - __slots__ = ("serializer",) - - #: The tag to mark the serialized object with. If empty, this tag is - #: only used as an intermediate step during tagging. 
- key: str = "" - - def __init__(self, serializer: TaggedJSONSerializer) -> None: - """Create a tagger for the given serializer.""" - self.serializer = serializer - - def check(self, value: t.Any) -> bool: - """Check if the given value should be tagged by this tag.""" - raise NotImplementedError - - def to_json(self, value: t.Any) -> t.Any: - """Convert the Python object to an object that is a valid JSON type. - The tag will be added later.""" - raise NotImplementedError - - def to_python(self, value: t.Any) -> t.Any: - """Convert the JSON representation back to the correct type. The tag - will already be removed.""" - raise NotImplementedError - - def tag(self, value: t.Any) -> dict[str, t.Any]: - """Convert the value to a valid JSON type and add the tag structure - around it.""" - return {self.key: self.to_json(value)} - - -class TagDict(JSONTag): - """Tag for 1-item dicts whose only key matches a registered tag. - - Internally, the dict key is suffixed with `__`, and the suffix is removed - when deserializing. - """ - - __slots__ = () - key = " di" - - def check(self, value: t.Any) -> bool: - return ( - isinstance(value, dict) - and len(value) == 1 - and next(iter(value)) in self.serializer.tags - ) - - def to_json(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {f"{key}__": self.serializer.tag(value[key])} - - def to_python(self, value: t.Any) -> t.Any: - key = next(iter(value)) - return {key[:-2]: value[key]} - - -class PassDict(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, dict) - - def to_json(self, value: t.Any) -> t.Any: - # JSON objects may only have string keys, so don't bother tagging the - # key here. - return {k: self.serializer.tag(v) for k, v in value.items()} - - tag = to_json - - -class TagTuple(JSONTag): - __slots__ = () - key = " t" - - def check(self, value: t.Any) -> bool: - return isinstance(value, tuple) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - def to_python(self, value: t.Any) -> t.Any: - return tuple(value) - - -class PassList(JSONTag): - __slots__ = () - - def check(self, value: t.Any) -> bool: - return isinstance(value, list) - - def to_json(self, value: t.Any) -> t.Any: - return [self.serializer.tag(item) for item in value] - - tag = to_json - - -class TagBytes(JSONTag): - __slots__ = () - key = " b" - - def check(self, value: t.Any) -> bool: - return isinstance(value, bytes) - - def to_json(self, value: t.Any) -> t.Any: - return b64encode(value).decode("ascii") - - def to_python(self, value: t.Any) -> t.Any: - return b64decode(value) - - -class TagMarkup(JSONTag): - """Serialize anything matching the :class:`~markupsafe.Markup` API by - having a ``__html__`` method to the result of that method. 
Always - deserializes to an instance of :class:`~markupsafe.Markup`.""" - - __slots__ = () - key = " m" - - def check(self, value: t.Any) -> bool: - return callable(getattr(value, "__html__", None)) - - def to_json(self, value: t.Any) -> t.Any: - return str(value.__html__()) - - def to_python(self, value: t.Any) -> t.Any: - return Markup(value) - - -class TagUUID(JSONTag): - __slots__ = () - key = " u" - - def check(self, value: t.Any) -> bool: - return isinstance(value, UUID) - - def to_json(self, value: t.Any) -> t.Any: - return value.hex - - def to_python(self, value: t.Any) -> t.Any: - return UUID(value) - - -class TagDateTime(JSONTag): - __slots__ = () - key = " d" - - def check(self, value: t.Any) -> bool: - return isinstance(value, datetime) - - def to_json(self, value: t.Any) -> t.Any: - return http_date(value) - - def to_python(self, value: t.Any) -> t.Any: - return parse_date(value) - - -class TaggedJSONSerializer: - """Serializer that uses a tag system to compactly represent objects that - are not JSON types. Passed as the intermediate serializer to - :class:`itsdangerous.Serializer`. - - The following extra types are supported: - - * :class:`dict` - * :class:`tuple` - * :class:`bytes` - * :class:`~markupsafe.Markup` - * :class:`~uuid.UUID` - * :class:`~datetime.datetime` - """ - - __slots__ = ("tags", "order") - - #: Tag classes to bind when creating the serializer. Other tags can be - #: added later using :meth:`~register`. - default_tags = [ - TagDict, - PassDict, - TagTuple, - PassList, - TagBytes, - TagMarkup, - TagUUID, - TagDateTime, - ] - - def __init__(self) -> None: - self.tags: dict[str, JSONTag] = {} - self.order: list[JSONTag] = [] - - for cls in self.default_tags: - self.register(cls) - - def register( - self, - tag_class: type[JSONTag], - force: bool = False, - index: int | None = None, - ) -> None: - """Register a new tag with this serializer. - - :param tag_class: tag class to register. Will be instantiated with this - serializer instance. - :param force: overwrite an existing tag. If false (default), a - :exc:`KeyError` is raised. - :param index: index to insert the new tag in the tag order. Useful when - the new tag is a special case of an existing tag. If ``None`` - (default), the tag is appended to the end of the order. - - :raise KeyError: if the tag key is already registered and ``force`` is - not true. 
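# Quick round-trip sketch for TaggedJSONSerializer: the extra types
# listed above survive dumps()/loads() losslessly (datetimes to the
# second, via http_date/parse_date).
from datetime import datetime, timezone

from flask.json.tag import TaggedJSONSerializer

s = TaggedJSONSerializer()
value = {"pair": (1, 2), "raw": b"\x00\x01", "at": datetime.now(timezone.utc)}
restored = s.loads(s.dumps(value))
assert restored["pair"] == (1, 2) and restored["raw"] == b"\x00\x01"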
- """ - tag = tag_class(self) - key = tag.key - - if key: - if not force and key in self.tags: - raise KeyError(f"Tag '{key}' is already registered.") - - self.tags[key] = tag - - if index is None: - self.order.append(tag) - else: - self.order.insert(index, tag) - - def tag(self, value: t.Any) -> t.Any: - """Convert a value to a tagged representation if necessary.""" - for tag in self.order: - if tag.check(value): - return tag.tag(value) - - return value - - def untag(self, value: dict[str, t.Any]) -> t.Any: - """Convert a tagged representation back to the original type.""" - if len(value) != 1: - return value - - key = next(iter(value)) - - if key not in self.tags: - return value - - return self.tags[key].to_python(value[key]) - - def _untag_scan(self, value: t.Any) -> t.Any: - if isinstance(value, dict): - # untag each item recursively - value = {k: self._untag_scan(v) for k, v in value.items()} - # untag the dict itself - value = self.untag(value) - elif isinstance(value, list): - # untag each item recursively - value = [self._untag_scan(item) for item in value] - - return value - - def dumps(self, value: t.Any) -> str: - """Tag the value and dump it to a compact JSON string.""" - return dumps(self.tag(value), separators=(",", ":")) - - def loads(self, value: str) -> t.Any: - """Load data from a JSON string and deserialized any tagged objects.""" - return self._untag_scan(loads(value)) diff --git a/myenv/Lib/site-packages/flask/logging.py b/myenv/Lib/site-packages/flask/logging.py deleted file mode 100644 index 0cb8f43..0000000 --- a/myenv/Lib/site-packages/flask/logging.py +++ /dev/null @@ -1,79 +0,0 @@ -from __future__ import annotations - -import logging -import sys -import typing as t - -from werkzeug.local import LocalProxy - -from .globals import request - -if t.TYPE_CHECKING: # pragma: no cover - from .sansio.app import App - - -@LocalProxy -def wsgi_errors_stream() -> t.TextIO: - """Find the most appropriate error stream for the application. If a request - is active, log to ``wsgi.errors``, otherwise use ``sys.stderr``. - - If you configure your own :class:`logging.StreamHandler`, you may want to - use this for the stream. If you are using file or dict configuration and - can't import this directly, you can refer to it as - ``ext://flask.logging.wsgi_errors_stream``. - """ - if request: - return request.environ["wsgi.errors"] # type: ignore[no-any-return] - - return sys.stderr - - -def has_level_handler(logger: logging.Logger) -> bool: - """Check if there is a handler in the logging chain that will handle the - given logger's :meth:`effective level <~logging.Logger.getEffectiveLevel>`. - """ - level = logger.getEffectiveLevel() - current = logger - - while current: - if any(handler.level <= level for handler in current.handlers): - return True - - if not current.propagate: - break - - current = current.parent # type: ignore - - return False - - -#: Log messages to :func:`~flask.logging.wsgi_errors_stream` with the format -#: ``[%(asctime)s] %(levelname)s in %(module)s: %(message)s``. -default_handler = logging.StreamHandler(wsgi_errors_stream) # type: ignore -default_handler.setFormatter( - logging.Formatter("[%(asctime)s] %(levelname)s in %(module)s: %(message)s") -) - - -def create_logger(app: App) -> logging.Logger: - """Get the Flask app's logger and configure it if needed. - - The logger name will be the same as - :attr:`app.import_name `. - - When :attr:`~flask.Flask.debug` is enabled, set the logger level to - :data:`logging.DEBUG` if it is not set. 
- - If there is no handler for the logger's effective level, add a - :class:`~logging.StreamHandler` for - :func:`~flask.logging.wsgi_errors_stream` with a basic format. - """ - logger = logging.getLogger(app.name) - - if app.debug and not logger.level: - logger.setLevel(logging.DEBUG) - - if not has_level_handler(logger): - logger.addHandler(default_handler) - - return logger diff --git a/myenv/Lib/site-packages/flask/py.typed b/myenv/Lib/site-packages/flask/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/flask/sansio/README.md b/myenv/Lib/site-packages/flask/sansio/README.md deleted file mode 100644 index 623ac19..0000000 --- a/myenv/Lib/site-packages/flask/sansio/README.md +++ /dev/null @@ -1,6 +0,0 @@ -# Sansio - -This folder contains code that can be used by alternative Flask -implementations, for example Quart. The code therefore cannot do any -IO, nor be part of a likely IO path. Finally this code cannot use the -Flask globals. diff --git a/myenv/Lib/site-packages/flask/sansio/app.py b/myenv/Lib/site-packages/flask/sansio/app.py deleted file mode 100644 index 01fd5db..0000000 --- a/myenv/Lib/site-packages/flask/sansio/app.py +++ /dev/null @@ -1,964 +0,0 @@ -from __future__ import annotations - -import logging -import os -import sys -import typing as t -from datetime import timedelta -from itertools import chain - -from werkzeug.exceptions import Aborter -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import BadRequestKeyError -from werkzeug.routing import BuildError -from werkzeug.routing import Map -from werkzeug.routing import Rule -from werkzeug.sansio.response import Response -from werkzeug.utils import cached_property -from werkzeug.utils import redirect as _wz_redirect - -from .. import typing as ft -from ..config import Config -from ..config import ConfigAttribute -from ..ctx import _AppCtxGlobals -from ..helpers import _split_blueprint_path -from ..helpers import get_debug_flag -from ..json.provider import DefaultJSONProvider -from ..json.provider import JSONProvider -from ..logging import create_logger -from ..templating import DispatchingJinjaLoader -from ..templating import Environment -from .scaffold import _endpoint_from_view_func -from .scaffold import find_package -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.wrappers import Response as BaseResponse - - from ..testing import FlaskClient - from ..testing import FlaskCliRunner - from .blueprints import Blueprint - -T_shell_context_processor = t.TypeVar( - "T_shell_context_processor", bound=ft.ShellContextProcessorCallable -) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) - - -def _make_timedelta(value: timedelta | int | None) -> timedelta | None: - if value is None or isinstance(value, timedelta): - return value - - return timedelta(seconds=value) - - -class App(Scaffold): - """The flask object implements a WSGI application and acts as the central - object. It is passed the name of the module or package of the - application. Once it is created it will act as a central registry for - the view functions, the URL rules, template configuration and much more. 
- - The name of the package is used to resolve resources from inside the - package or the folder the module is contained in depending on if the - package parameter resolves to an actual python package (a folder with - an :file:`__init__.py` file inside) or a standard module (just a ``.py`` file). - - For more information about resource loading, see :func:`open_resource`. - - Usually you create a :class:`Flask` instance in your main module or - in the :file:`__init__.py` file of your package like this:: - - from flask import Flask - app = Flask(__name__) - - .. admonition:: About the First Parameter - - The idea of the first parameter is to give Flask an idea of what - belongs to your application. This name is used to find resources - on the filesystem, can be used by extensions to improve debugging - information and a lot more. - - So it's important what you provide there. If you are using a single - module, `__name__` is always the correct value. If you however are - using a package, it's usually recommended to hardcode the name of - your package there. - - For example if your application is defined in :file:`yourapplication/app.py` - you should create it with one of the two versions below:: - - app = Flask('yourapplication') - app = Flask(__name__.split('.')[0]) - - Why is that? The application will work even with `__name__`, thanks - to how resources are looked up. However it will make debugging more - painful. Certain extensions can make assumptions based on the - import name of your application. For example the Flask-SQLAlchemy - extension will look for the code in your application that triggered - an SQL query in debug mode. If the import name is not properly set - up, that debugging information is lost. (For example it would only - pick up SQL queries in `yourapplication.app` and not - `yourapplication.views.frontend`) - - .. versionadded:: 0.7 - The `static_url_path`, `static_folder`, and `template_folder` - parameters were added. - - .. versionadded:: 0.8 - The `instance_path` and `instance_relative_config` parameters were - added. - - .. versionadded:: 0.11 - The `root_path` parameter was added. - - .. versionadded:: 1.0 - The ``host_matching`` and ``static_host`` parameters were added. - - .. versionadded:: 1.0 - The ``subdomain_matching`` parameter was added. Subdomain - matching needs to be enabled manually now. Setting - :data:`SERVER_NAME` does not implicitly enable it. - - :param import_name: the name of the application package - :param static_url_path: can be used to specify a different path for the - static files on the web. Defaults to the name - of the `static_folder` folder. - :param static_folder: The folder with static files that is served at - ``static_url_path``. Relative to the application ``root_path`` - or an absolute path. Defaults to ``'static'``. - :param static_host: the host to use when adding the static route. - Defaults to None. Required when using ``host_matching=True`` - with a ``static_folder`` configured. - :param host_matching: set ``url_map.host_matching`` attribute. - Defaults to False. - :param subdomain_matching: consider the subdomain relative to - :data:`SERVER_NAME` when matching routes. Defaults to False. - :param template_folder: the folder that contains the templates that should - be used by the application. Defaults to - ``'templates'`` folder in the root path of the - application. - :param instance_path: An alternative instance path for the application. 
- By default the folder ``'instance'`` next to the - package or module is assumed to be the instance - path. - :param instance_relative_config: if set to ``True`` relative filenames - for loading the config are assumed to - be relative to the instance path instead - of the application root. - :param root_path: The path to the root of the application files. - This should only be set manually when it can't be detected - automatically, such as for namespace packages. - """ - - #: The class of the object assigned to :attr:`aborter`, created by - #: :meth:`create_aborter`. That object is called by - #: :func:`flask.abort` to raise HTTP errors, and can be - #: called directly as well. - #: - #: Defaults to :class:`werkzeug.exceptions.Aborter`. - #: - #: .. versionadded:: 2.2 - aborter_class = Aborter - - #: The class that is used for the Jinja environment. - #: - #: .. versionadded:: 0.11 - jinja_environment = Environment - - #: The class that is used for the :data:`~flask.g` instance. - #: - #: Example use cases for a custom class: - #: - #: 1. Store arbitrary attributes on flask.g. - #: 2. Add a property for lazy per-request database connectors. - #: 3. Return None instead of AttributeError on unexpected attributes. - #: 4. Raise exception if an unexpected attr is set, a "controlled" flask.g. - #: - #: In Flask 0.9 this property was called `request_globals_class` but it - #: was changed in 0.10 to :attr:`app_ctx_globals_class` because the - #: flask.g object is now application context scoped. - #: - #: .. versionadded:: 0.10 - app_ctx_globals_class = _AppCtxGlobals - - #: The class that is used for the ``config`` attribute of this app. - #: Defaults to :class:`~flask.Config`. - #: - #: Example use cases for a custom class: - #: - #: 1. Default values for certain config options. - #: 2. Access to config values through attributes in addition to keys. - #: - #: .. versionadded:: 0.11 - config_class = Config - - #: The testing flag. Set this to ``True`` to enable the test mode of - #: Flask extensions (and in the future probably also Flask itself). - #: For example this might activate test helpers that have an - #: additional runtime cost which should not be enabled by default. - #: - #: If this is enabled and PROPAGATE_EXCEPTIONS is not changed from the - #: default it's implicitly enabled. - #: - #: This attribute can also be configured from the config with the - #: ``TESTING`` configuration key. Defaults to ``False``. - testing = ConfigAttribute[bool]("TESTING") - - #: If a secret key is set, cryptographic components can use this to - #: sign cookies and other things. Set this to a complex random value - #: when you want to use the secure cookie for instance. - #: - #: This attribute can also be configured from the config with the - #: :data:`SECRET_KEY` configuration key. Defaults to ``None``. - secret_key = ConfigAttribute[t.Union[str, bytes, None]]("SECRET_KEY") - - #: A :class:`~datetime.timedelta` which is used to set the expiration - #: date of a permanent session. The default is 31 days which makes a - #: permanent session survive for roughly one month. - #: - #: This attribute can also be configured from the config with the - #: ``PERMANENT_SESSION_LIFETIME`` configuration key. 
Defaults to - #: ``timedelta(days=31)`` - permanent_session_lifetime = ConfigAttribute[timedelta]( - "PERMANENT_SESSION_LIFETIME", - get_converter=_make_timedelta, # type: ignore[arg-type] - ) - - json_provider_class: type[JSONProvider] = DefaultJSONProvider - """A subclass of :class:`~flask.json.provider.JSONProvider`. An - instance is created and assigned to :attr:`app.json` when creating - the app. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, uses - Python's built-in :mod:`json` library. A different provider can use - a different JSON library. - - .. versionadded:: 2.2 - """ - - #: Options that are passed to the Jinja environment in - #: :meth:`create_jinja_environment`. Changing these options after - #: the environment is created (accessing :attr:`jinja_env`) will - #: have no effect. - #: - #: .. versionchanged:: 1.1.0 - #: This is a ``dict`` instead of an ``ImmutableDict`` to allow - #: easier configuration. - #: - jinja_options: dict[str, t.Any] = {} - - #: The rule object to use for URL rules created. This is used by - #: :meth:`add_url_rule`. Defaults to :class:`werkzeug.routing.Rule`. - #: - #: .. versionadded:: 0.7 - url_rule_class = Rule - - #: The map object to use for storing the URL rules and routing - #: configuration parameters. Defaults to :class:`werkzeug.routing.Map`. - #: - #: .. versionadded:: 1.1.0 - url_map_class = Map - - #: The :meth:`test_client` method creates an instance of this test - #: client class. Defaults to :class:`~flask.testing.FlaskClient`. - #: - #: .. versionadded:: 0.7 - test_client_class: type[FlaskClient] | None = None - - #: The :class:`~click.testing.CliRunner` subclass, by default - #: :class:`~flask.testing.FlaskCliRunner` that is used by - #: :meth:`test_cli_runner`. Its ``__init__`` method should take a - #: Flask app object as the first argument. - #: - #: .. versionadded:: 1.0 - test_cli_runner_class: type[FlaskCliRunner] | None = None - - default_config: dict[str, t.Any] - response_class: type[Response] - - def __init__( - self, - import_name: str, - static_url_path: str | None = None, - static_folder: str | os.PathLike[str] | None = "static", - static_host: str | None = None, - host_matching: bool = False, - subdomain_matching: bool = False, - template_folder: str | os.PathLike[str] | None = "templates", - instance_path: str | None = None, - instance_relative_config: bool = False, - root_path: str | None = None, - ): - super().__init__( - import_name=import_name, - static_folder=static_folder, - static_url_path=static_url_path, - template_folder=template_folder, - root_path=root_path, - ) - - if instance_path is None: - instance_path = self.auto_find_instance_path() - elif not os.path.isabs(instance_path): - raise ValueError( - "If an instance path is provided it must be absolute." - " A relative path was given instead." - ) - - #: Holds the path to the instance folder. - #: - #: .. versionadded:: 0.8 - self.instance_path = instance_path - - #: The configuration dictionary as :class:`Config`. This behaves - #: exactly like a regular dictionary but supports additional methods - #: to load a config from files. - self.config = self.make_config(instance_relative_config) - - #: An instance of :attr:`aborter_class` created by - #: :meth:`make_aborter`. This is called by :func:`flask.abort` - #: to raise HTTP errors, and can be called directly as well. - #: - #: .. versionadded:: 2.2 - #: Moved from ``flask.abort``, which calls this object. 
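-        #:
-        #: As a rough sketch of the call path (the route and status code
-        #: below are illustrative, not from this file)::
-        #:
-        #:     from flask import abort
-        #:
-        #:     @app.route("/admin")
-        #:     def admin():
-        #:         # flask.abort delegates to app.aborter, raising
-        #:         # werkzeug.exceptions.Forbidden here.
-        #:         abort(403)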
- self.aborter = self.make_aborter() - - self.json: JSONProvider = self.json_provider_class(self) - """Provides access to JSON methods. Functions in ``flask.json`` - will call methods on this provider when the application context - is active. Used for handling JSON requests and responses. - - An instance of :attr:`json_provider_class`. Can be customized by - changing that attribute on a subclass, or by assigning to this - attribute afterwards. - - The default, :class:`~flask.json.provider.DefaultJSONProvider`, - uses Python's built-in :mod:`json` library. A different provider - can use a different JSON library. - - .. versionadded:: 2.2 - """ - - #: A list of functions that are called by - #: :meth:`handle_url_build_error` when :meth:`.url_for` raises a - #: :exc:`~werkzeug.routing.BuildError`. Each function is called - #: with ``error``, ``endpoint`` and ``values``. If a function - #: returns ``None`` or raises a ``BuildError``, it is skipped. - #: Otherwise, its return value is returned by ``url_for``. - #: - #: .. versionadded:: 0.9 - self.url_build_error_handlers: list[ - t.Callable[[Exception, str, dict[str, t.Any]], str] - ] = [] - - #: A list of functions that are called when the application context - #: is destroyed. Since the application context is also torn down - #: if the request ends this is the place to store code that disconnects - #: from databases. - #: - #: .. versionadded:: 0.9 - self.teardown_appcontext_funcs: list[ft.TeardownCallable] = [] - - #: A list of shell context processor functions that should be run - #: when a shell context is created. - #: - #: .. versionadded:: 0.11 - self.shell_context_processors: list[ft.ShellContextProcessorCallable] = [] - - #: Maps registered blueprint names to blueprint objects. The - #: dict retains the order the blueprints were registered in. - #: Blueprints can be registered multiple times, this dict does - #: not track how often they were attached. - #: - #: .. versionadded:: 0.7 - self.blueprints: dict[str, Blueprint] = {} - - #: a place where extensions can store application specific state. For - #: example this is where an extension could store database engines and - #: similar things. - #: - #: The key must match the name of the extension module. For example in - #: case of a "Flask-Foo" extension in `flask_foo`, the key would be - #: ``'foo'``. - #: - #: .. versionadded:: 0.7 - self.extensions: dict[str, t.Any] = {} - - #: The :class:`~werkzeug.routing.Map` for this instance. You can use - #: this to change the routing converters after the class was created - #: but before any routes are connected. Example:: - #: - #: from werkzeug.routing import BaseConverter - #: - #: class ListConverter(BaseConverter): - #: def to_python(self, value): - #: return value.split(',') - #: def to_url(self, values): - #: return ','.join(super(ListConverter, self).to_url(value) - #: for value in values) - #: - #: app = Flask(__name__) - #: app.url_map.converters['list'] = ListConverter - self.url_map = self.url_map_class(host_matching=host_matching) - - self.subdomain_matching = subdomain_matching - - # tracks internally if the application already handled at least one - # request. - self._got_first_request = False - - def _check_setup_finished(self, f_name: str) -> None: - if self._got_first_request: - raise AssertionError( - f"The setup method '{f_name}' can no longer be called" - " on the application. 
It has already handled its first" - " request, any changes will not be applied" - " consistently.\n" - "Make sure all imports, decorators, functions, etc." - " needed to set up the application are done before" - " running it." - ) - - @cached_property - def name(self) -> str: # type: ignore - """The name of the application. This is usually the import name - with the difference that it's guessed from the run file if the - import name is main. This name is used as a display name when - Flask needs the name of the application. It can be set and overridden - to change the value. - - .. versionadded:: 0.8 - """ - if self.import_name == "__main__": - fn: str | None = getattr(sys.modules["__main__"], "__file__", None) - if fn is None: - return "__main__" - return os.path.splitext(os.path.basename(fn))[0] - return self.import_name - - @cached_property - def logger(self) -> logging.Logger: - """A standard Python :class:`~logging.Logger` for the app, with - the same name as :attr:`name`. - - In debug mode, the logger's :attr:`~logging.Logger.level` will - be set to :data:`~logging.DEBUG`. - - If there are no handlers configured, a default handler will be - added. See :doc:`/logging` for more information. - - .. versionchanged:: 1.1.0 - The logger takes the same name as :attr:`name` rather than - hard-coding ``"flask.app"``. - - .. versionchanged:: 1.0.0 - Behavior was simplified. The logger is always named - ``"flask.app"``. The level is only set during configuration, - it doesn't check ``app.debug`` each time. Only one format is - used, not different ones depending on ``app.debug``. No - handlers are removed, and a handler is only added if no - handlers are already configured. - - .. versionadded:: 0.3 - """ - return create_logger(self) - - @cached_property - def jinja_env(self) -> Environment: - """The Jinja environment used to load templates. - - The environment is created the first time this property is - accessed. Changing :attr:`jinja_options` after that will have no - effect. - """ - return self.create_jinja_environment() - - def create_jinja_environment(self) -> Environment: - raise NotImplementedError() - - def make_config(self, instance_relative: bool = False) -> Config: - """Used to create the config attribute by the Flask constructor. - The `instance_relative` parameter is passed in from the constructor - of Flask (there named `instance_relative_config`) and indicates if - the config should be relative to the instance path or the root path - of the application. - - .. versionadded:: 0.8 - """ - root_path = self.root_path - if instance_relative: - root_path = self.instance_path - defaults = dict(self.default_config) - defaults["DEBUG"] = get_debug_flag() - return self.config_class(root_path, defaults) - - def make_aborter(self) -> Aborter: - """Create the object to assign to :attr:`aborter`. That object - is called by :func:`flask.abort` to raise HTTP errors, and can - be called directly as well. - - By default, this creates an instance of :attr:`aborter_class`, - which defaults to :class:`werkzeug.exceptions.Aborter`. - - .. versionadded:: 2.2 - """ - return self.aborter_class() - - def auto_find_instance_path(self) -> str: - """Tries to locate the instance path if it was not provided to the - constructor of the application class. It will basically calculate - the path to a folder named ``instance`` next to your main file or - the package. - - .. 
versionadded:: 0.8
-        """
-        prefix, package_path = find_package(self.import_name)
-        if prefix is None:
-            return os.path.join(package_path, "instance")
-        return os.path.join(prefix, "var", f"{self.name}-instance")
-
-    def create_global_jinja_loader(self) -> DispatchingJinjaLoader:
-        """Creates the loader for the Jinja2 environment. Can be used to
-        override just the loader and keep the rest unchanged. Overriding
-        this function is discouraged; override :meth:`jinja_loader`
-        instead.
-
-        The global loader dispatches between the loaders of the application
-        and the individual blueprints.
-
-        .. versionadded:: 0.7
-        """
-        return DispatchingJinjaLoader(self)
-
-    def select_jinja_autoescape(self, filename: str) -> bool:
-        """Returns ``True`` if autoescaping should be active for the given
-        template name. If no template name is given, returns ``True``.
-
-        .. versionchanged:: 2.2
-            Autoescaping is now enabled by default for ``.svg`` files.
-
-        .. versionadded:: 0.5
-        """
-        if filename is None:
-            return True
-        return filename.endswith((".html", ".htm", ".xml", ".xhtml", ".svg"))
-
-    @property
-    def debug(self) -> bool:
-        """Whether debug mode is enabled. When using ``flask run`` to start the
-        development server, an interactive debugger will be shown for unhandled
-        exceptions, and the server will be reloaded when code changes. This maps to the
-        :data:`DEBUG` config key. It may not behave as expected if set late.
-
-        **Do not enable debug mode when deploying in production.**
-
-        Default: ``False``
-        """
-        return self.config["DEBUG"]  # type: ignore[no-any-return]
-
-    @debug.setter
-    def debug(self, value: bool) -> None:
-        self.config["DEBUG"] = value
-
-        if self.config["TEMPLATES_AUTO_RELOAD"] is None:
-            self.jinja_env.auto_reload = value
-
-    @setupmethod
-    def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None:
-        """Register a :class:`~flask.Blueprint` on the application. Keyword
-        arguments passed to this method will override the defaults set on the
-        blueprint.
-
-        Calls the blueprint's :meth:`~flask.Blueprint.register` method after
-        recording the blueprint in the application's :attr:`blueprints`.
-
-        :param blueprint: The blueprint to register.
-        :param url_prefix: Blueprint routes will be prefixed with this.
-        :param subdomain: Blueprint routes will match on this subdomain.
-        :param url_defaults: Blueprint routes will use these default values for
-            view arguments.
-        :param options: Additional keyword arguments are passed to
-            :class:`~flask.blueprints.BlueprintSetupState`. They can be
-            accessed in :meth:`~flask.Blueprint.record` callbacks.
-
-        .. versionchanged:: 2.0.1
-            The ``name`` option can be used to change the (pre-dotted)
-            name the blueprint is registered with. This allows the same
-            blueprint to be registered multiple times with unique names
-            for ``url_for``.
-
-        .. versionadded:: 0.7
-        """
-        blueprint.register(self, options)
-
-    def iter_blueprints(self) -> t.ValuesView[Blueprint]:
-        """Iterates over all blueprints by the order they were registered.
-
-        .. versionadded:: 0.11
-        """
-        return self.blueprints.values()
-
-    @setupmethod
-    def add_url_rule(
-        self,
-        rule: str,
-        endpoint: str | None = None,
-        view_func: ft.RouteCallable | None = None,
-        provide_automatic_options: bool | None = None,
-        **options: t.Any,
-    ) -> None:
-        if endpoint is None:
-            endpoint = _endpoint_from_view_func(view_func)  # type: ignore
-        options["endpoint"] = endpoint
-        methods = options.pop("methods", None)
-
-        # if the methods are not given and the view_func object knows its
-        # methods we can use that instead. If neither exists, we go with
-        # a tuple of only ``GET`` as default.
-        if methods is None:
-            methods = getattr(view_func, "methods", None) or ("GET",)
-        if isinstance(methods, str):
-            raise TypeError(
-                "Allowed methods must be a list of strings, for"
-                ' example: @app.route(..., methods=["POST"])'
-            )
-        methods = {item.upper() for item in methods}
-
-        # Methods that should always be added
-        required_methods = set(getattr(view_func, "required_methods", ()))
-
-        # starting with Flask 0.8 the view_func object can disable and
-        # force-enable the automatic options handling.
-        if provide_automatic_options is None:
-            provide_automatic_options = getattr(
-                view_func, "provide_automatic_options", None
-            )
-
-        if provide_automatic_options is None:
-            if "OPTIONS" not in methods:
-                provide_automatic_options = True
-                required_methods.add("OPTIONS")
-            else:
-                provide_automatic_options = False
-
-        # Add the required methods now.
-        methods |= required_methods
-
-        rule_obj = self.url_rule_class(rule, methods=methods, **options)
-        rule_obj.provide_automatic_options = provide_automatic_options  # type: ignore[attr-defined]
-
-        self.url_map.add(rule_obj)
-        if view_func is not None:
-            old_func = self.view_functions.get(endpoint)
-            if old_func is not None and old_func != view_func:
-                raise AssertionError(
-                    "View function mapping is overwriting an existing"
-                    f" endpoint function: {endpoint}"
-                )
-            self.view_functions[endpoint] = view_func
-
-    @setupmethod
-    def template_filter(
-        self, name: str | None = None
-    ) -> t.Callable[[T_template_filter], T_template_filter]:
-        """A decorator that is used to register a custom template filter.
-        You can specify a name for the filter, otherwise the function
-        name will be used. Example::
-
-            @app.template_filter()
-            def reverse(s):
-                return s[::-1]
-
-        :param name: the optional name of the filter, otherwise the
-            function name will be used.
-        """
-
-        def decorator(f: T_template_filter) -> T_template_filter:
-            self.add_template_filter(f, name=name)
-            return f
-
-        return decorator
-
-    @setupmethod
-    def add_template_filter(
-        self, f: ft.TemplateFilterCallable, name: str | None = None
-    ) -> None:
-        """Register a custom template filter. Works exactly like the
-        :meth:`template_filter` decorator.
-
-        :param name: the optional name of the filter, otherwise the
-            function name will be used.
-        """
-        self.jinja_env.filters[name or f.__name__] = f
-
-    @setupmethod
-    def template_test(
-        self, name: str | None = None
-    ) -> t.Callable[[T_template_test], T_template_test]:
-        """A decorator that is used to register a custom template test.
-        You can specify a name for the test, otherwise the function
-        name will be used. Example::
-
-            @app.template_test()
-            def is_prime(n):
-                if n < 2:
-                    return False
-                if n == 2:
-                    return True
-                for i in range(2, int(math.ceil(math.sqrt(n))) + 1):
-                    if n % i == 0:
-                        return False
-                return True
-
-        .. versionadded:: 0.10
-
-        :param name: the optional name of the test, otherwise the
-            function name will be used.
- """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a custom template test. Works exactly like the - :meth:`template_test` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - self.jinja_env.tests[name or f.__name__] = f - - @setupmethod - def template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """A decorator that is used to register a custom template global function. - You can specify a name for the global function, otherwise the function - name will be used. Example:: - - @app.template_global() - def double(n): - return 2 * n - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a custom template global function. Works exactly like the - :meth:`template_global` decorator. - - .. versionadded:: 0.10 - - :param name: the optional name of the global function, otherwise the - function name will be used. - """ - self.jinja_env.globals[name or f.__name__] = f - - @setupmethod - def teardown_appcontext(self, f: T_teardown) -> T_teardown: - """Registers a function to be called when the application - context is popped. The application context is typically popped - after the request context for each request, at the end of CLI - commands, or after a manually pushed context ends. - - .. code-block:: python - - with app.app_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the app context is - made inactive. Since a request context typically also manages an - application context it would also be called when you pop a - request context. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. - - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - .. versionadded:: 0.9 - """ - self.teardown_appcontext_funcs.append(f) - return f - - @setupmethod - def shell_context_processor( - self, f: T_shell_context_processor - ) -> T_shell_context_processor: - """Registers a shell context processor function. - - .. versionadded:: 0.11 - """ - self.shell_context_processors.append(f) - return f - - def _find_error_handler( - self, e: Exception, blueprints: list[str] - ) -> ft.ErrorHandlerCallable | None: - """Return a registered error handler for an exception in this order: - blueprint handler for a specific code, app handler for a specific code, - blueprint handler for an exception class, app handler for an exception - class, or ``None`` if a suitable handler is not found. 
- """ - exc_class, code = self._get_exc_class_and_code(type(e)) - names = (*blueprints, None) - - for c in (code, None) if code is not None else (None,): - for name in names: - handler_map = self.error_handler_spec[name][c] - - if not handler_map: - continue - - for cls in exc_class.__mro__: - handler = handler_map.get(cls) - - if handler is not None: - return handler - return None - - def trap_http_exception(self, e: Exception) -> bool: - """Checks if an HTTP exception should be trapped or not. By default - this will return ``False`` for all exceptions except for a bad request - key error if ``TRAP_BAD_REQUEST_ERRORS`` is set to ``True``. It - also returns ``True`` if ``TRAP_HTTP_EXCEPTIONS`` is set to ``True``. - - This is called for all HTTP exceptions raised by a view function. - If it returns ``True`` for any exception the error handler for this - exception is not called and it shows up as regular exception in the - traceback. This is helpful for debugging implicitly raised HTTP - exceptions. - - .. versionchanged:: 1.0 - Bad request errors are not trapped by default in debug mode. - - .. versionadded:: 0.8 - """ - if self.config["TRAP_HTTP_EXCEPTIONS"]: - return True - - trap_bad_request = self.config["TRAP_BAD_REQUEST_ERRORS"] - - # if unset, trap key errors in debug mode - if ( - trap_bad_request is None - and self.debug - and isinstance(e, BadRequestKeyError) - ): - return True - - if trap_bad_request: - return isinstance(e, BadRequest) - - return False - - def should_ignore_error(self, error: BaseException | None) -> bool: - """This is called to figure out if an error should be ignored - or not as far as the teardown system is concerned. If this - function returns ``True`` then the teardown handlers will not be - passed the error. - - .. versionadded:: 0.10 - """ - return False - - def redirect(self, location: str, code: int = 302) -> BaseResponse: - """Create a redirect response object. - - This is called by :func:`flask.redirect`, and can be called - directly as well. - - :param location: The URL to redirect to. - :param code: The status code for the redirect. - - .. versionadded:: 2.2 - Moved from ``flask.redirect``, which calls this method. - """ - return _wz_redirect( - location, - code=code, - Response=self.response_class, # type: ignore[arg-type] - ) - - def inject_url_defaults(self, endpoint: str, values: dict[str, t.Any]) -> None: - """Injects the URL defaults for the given endpoint directly into - the values dictionary passed. This is used internally and - automatically called on URL building. - - .. versionadded:: 0.7 - """ - names: t.Iterable[str | None] = (None,) - - # url_for may be called outside a request context, parse the - # passed endpoint instead of using request.blueprints. - if "." in endpoint: - names = chain( - names, reversed(_split_blueprint_path(endpoint.rpartition(".")[0])) - ) - - for name in names: - if name in self.url_default_functions: - for func in self.url_default_functions[name]: - func(endpoint, values) - - def handle_url_build_error( - self, error: BuildError, endpoint: str, values: dict[str, t.Any] - ) -> str: - """Called by :meth:`.url_for` if a - :exc:`~werkzeug.routing.BuildError` was raised. If this returns - a value, it will be returned by ``url_for``, otherwise the error - will be re-raised. - - Each function in :attr:`url_build_error_handlers` is called with - ``error``, ``endpoint`` and ``values``. If a function returns - ``None`` or raises a ``BuildError``, it is skipped. Otherwise, - its return value is returned by ``url_for``. 
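-
-        For example, a sketch of a fallback handler that returns an
-        external URL for one endpoint (the endpoint name and URL are
-        hypothetical)::
-
-            def external_url_handler(error, endpoint, values):
-                if endpoint == "docs":
-                    return "https://example.invalid/docs"
-                # Re-raising the BuildError means this handler is skipped.
-                raise error
-
-            app.url_build_error_handlers.append(external_url_handler)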
- - :param error: The active ``BuildError`` being handled. - :param endpoint: The endpoint being built. - :param values: The keyword arguments passed to ``url_for``. - """ - for handler in self.url_build_error_handlers: - try: - rv = handler(error, endpoint, values) - except BuildError as e: - # make error available outside except block - error = e - else: - if rv is not None: - return rv - - # Re-raise if called with an active exception, otherwise raise - # the passed in exception. - if error is sys.exc_info()[1]: - raise - - raise error diff --git a/myenv/Lib/site-packages/flask/sansio/blueprints.py b/myenv/Lib/site-packages/flask/sansio/blueprints.py deleted file mode 100644 index 4f912cc..0000000 --- a/myenv/Lib/site-packages/flask/sansio/blueprints.py +++ /dev/null @@ -1,632 +0,0 @@ -from __future__ import annotations - -import os -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from .. import typing as ft -from .scaffold import _endpoint_from_view_func -from .scaffold import _sentinel -from .scaffold import Scaffold -from .scaffold import setupmethod - -if t.TYPE_CHECKING: # pragma: no cover - from .app import App - -DeferredSetupFunction = t.Callable[["BlueprintSetupState"], None] -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_template_filter = t.TypeVar("T_template_filter", bound=ft.TemplateFilterCallable) -T_template_global = t.TypeVar("T_template_global", bound=ft.TemplateGlobalCallable) -T_template_test = t.TypeVar("T_template_test", bound=ft.TemplateTestCallable) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) - - -class BlueprintSetupState: - """Temporary holder object for registering a blueprint with the - application. An instance of this class is created by the - :meth:`~flask.Blueprint.make_setup_state` method and later passed - to all register callback functions. - """ - - def __init__( - self, - blueprint: Blueprint, - app: App, - options: t.Any, - first_registration: bool, - ) -> None: - #: a reference to the current application - self.app = app - - #: a reference to the blueprint that created this setup state. - self.blueprint = blueprint - - #: a dictionary with all options that were passed to the - #: :meth:`~flask.Flask.register_blueprint` method. - self.options = options - - #: as blueprints can be registered multiple times with the - #: application and not everything wants to be registered - #: multiple times on it, this attribute can be used to figure - #: out if the blueprint was registered in the past already. - self.first_registration = first_registration - - subdomain = self.options.get("subdomain") - if subdomain is None: - subdomain = self.blueprint.subdomain - - #: The subdomain that the blueprint should be active for, ``None`` - #: otherwise. - self.subdomain = subdomain - - url_prefix = self.options.get("url_prefix") - if url_prefix is None: - url_prefix = self.blueprint.url_prefix - #: The prefix that should be used for all URLs defined on the - #: blueprint. 
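-        #: For example (names are illustrative), registering with
-        #: ``app.register_blueprint(bp, url_prefix="/v2")`` overrides any
-        #: prefix set on the blueprint itself, so a ``@bp.route("/users")``
-        #: rule is served at ``/v2/users``.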
- self.url_prefix = url_prefix - - self.name = self.options.get("name", blueprint.name) - self.name_prefix = self.options.get("name_prefix", "") - - #: A dictionary with URL defaults that is added to each and every - #: URL that was defined with the blueprint. - self.url_defaults = dict(self.blueprint.url_values_defaults) - self.url_defaults.update(self.options.get("url_defaults", ())) - - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - **options: t.Any, - ) -> None: - """A helper method to register a rule (and optionally a view function) - to the application. The endpoint is automatically prefixed with the - blueprint's name. - """ - if self.url_prefix is not None: - if rule: - rule = "/".join((self.url_prefix.rstrip("/"), rule.lstrip("/"))) - else: - rule = self.url_prefix - options.setdefault("subdomain", self.subdomain) - if endpoint is None: - endpoint = _endpoint_from_view_func(view_func) # type: ignore - defaults = self.url_defaults - if "defaults" in options: - defaults = dict(defaults, **options.pop("defaults")) - - self.app.add_url_rule( - rule, - f"{self.name_prefix}.{self.name}.{endpoint}".lstrip("."), - view_func, - defaults=defaults, - **options, - ) - - -class Blueprint(Scaffold): - """Represents a blueprint, a collection of routes and other - app-related functions that can be registered on a real application - later. - - A blueprint is an object that allows defining application functions - without requiring an application object ahead of time. It uses the - same decorators as :class:`~flask.Flask`, but defers the need for an - application by recording them for later registration. - - Decorating a function with a blueprint creates a deferred function - that is called with :class:`~flask.blueprints.BlueprintSetupState` - when the blueprint is registered on an application. - - See :doc:`/blueprints` for more information. - - :param name: The name of the blueprint. Will be prepended to each - endpoint name. - :param import_name: The name of the blueprint package, usually - ``__name__``. This helps locate the ``root_path`` for the - blueprint. - :param static_folder: A folder with static files that should be - served by the blueprint's static route. The path is relative to - the blueprint's root path. Blueprint static files are disabled - by default. - :param static_url_path: The url to serve static files from. - Defaults to ``static_folder``. If the blueprint does not have - a ``url_prefix``, the app's static route will take precedence, - and the blueprint's static files won't be accessible. - :param template_folder: A folder with templates that should be added - to the app's template search path. The path is relative to the - blueprint's root path. Blueprint templates are disabled by - default. Blueprint templates have a lower precedence than those - in the app's templates folder. - :param url_prefix: A path to prepend to all of the blueprint's URLs, - to make them distinct from the rest of the app's routes. - :param subdomain: A subdomain that blueprint routes will match on by - default. - :param url_defaults: A dict of default values that blueprint routes - will receive by default. - :param root_path: By default, the blueprint will automatically set - this based on ``import_name``. In certain situations this - automatic detection can fail, so the path can be specified - manually instead. - - .. versionchanged:: 1.1.0 - Blueprints have a ``cli`` group to register nested CLI commands. 
The ``cli_group`` parameter controls the name of the group under
-        the ``flask`` command.
-
-    .. versionadded:: 0.7
-    """
-
-    _got_registered_once = False
-
-    def __init__(
-        self,
-        name: str,
-        import_name: str,
-        static_folder: str | os.PathLike[str] | None = None,
-        static_url_path: str | None = None,
-        template_folder: str | os.PathLike[str] | None = None,
-        url_prefix: str | None = None,
-        subdomain: str | None = None,
-        url_defaults: dict[str, t.Any] | None = None,
-        root_path: str | None = None,
-        cli_group: str | None = _sentinel,  # type: ignore[assignment]
-    ):
-        super().__init__(
-            import_name=import_name,
-            static_folder=static_folder,
-            static_url_path=static_url_path,
-            template_folder=template_folder,
-            root_path=root_path,
-        )
-
-        if not name:
-            raise ValueError("'name' may not be empty.")
-
-        if "." in name:
-            raise ValueError("'name' may not contain a dot '.' character.")
-
-        self.name = name
-        self.url_prefix = url_prefix
-        self.subdomain = subdomain
-        self.deferred_functions: list[DeferredSetupFunction] = []
-
-        if url_defaults is None:
-            url_defaults = {}
-
-        self.url_values_defaults = url_defaults
-        self.cli_group = cli_group
-        self._blueprints: list[tuple[Blueprint, dict[str, t.Any]]] = []
-
-    def _check_setup_finished(self, f_name: str) -> None:
-        if self._got_registered_once:
-            raise AssertionError(
-                f"The setup method '{f_name}' can no longer be called on the blueprint"
-                f" '{self.name}'. It has already been registered at least once, any"
-                " changes will not be applied consistently.\n"
-                "Make sure all imports, decorators, functions, etc. needed to set up"
-                " the blueprint are done before registering it."
-            )
-
-    @setupmethod
-    def record(self, func: DeferredSetupFunction) -> None:
-        """Registers a function that is called when the blueprint is
-        registered on the application. The function is called with the
-        setup state returned by the :meth:`make_setup_state` method.
-        """
-        self.deferred_functions.append(func)
-
-    @setupmethod
-    def record_once(self, func: DeferredSetupFunction) -> None:
-        """Works like :meth:`record` but wraps the function in another
-        function that ensures it is only called once. If the blueprint is
-        registered a second time on the application, the function passed
-        is not called.
-        """
-
-        def wrapper(state: BlueprintSetupState) -> None:
-            if state.first_registration:
-                func(state)
-
-        self.record(update_wrapper(wrapper, func))
-
-    def make_setup_state(
-        self, app: App, options: dict[str, t.Any], first_registration: bool = False
-    ) -> BlueprintSetupState:
-        """Creates an instance of :class:`~flask.blueprints.BlueprintSetupState`
-        that is later passed to the register callback functions.
-        Subclasses can override this to return a subclass of the setup state.
-        """
-        return BlueprintSetupState(self, app, options, first_registration)
-
-    @setupmethod
-    def register_blueprint(self, blueprint: Blueprint, **options: t.Any) -> None:
-        """Register a :class:`~flask.Blueprint` on this blueprint. Keyword
-        arguments passed to this method will override the defaults set
-        on the blueprint.
-
-        .. versionchanged:: 2.0.1
-            The ``name`` option can be used to change the (pre-dotted)
-            name the blueprint is registered with. This allows the same
-            blueprint to be registered multiple times with unique names
-            for ``url_for``.
-
-        ..
versionadded:: 2.0 - """ - if blueprint is self: - raise ValueError("Cannot register a blueprint on itself") - self._blueprints.append((blueprint, options)) - - def register(self, app: App, options: dict[str, t.Any]) -> None: - """Called by :meth:`Flask.register_blueprint` to register all - views and callbacks registered on the blueprint with the - application. Creates a :class:`.BlueprintSetupState` and calls - each :meth:`record` callback with it. - - :param app: The application this blueprint is being registered - with. - :param options: Keyword arguments forwarded from - :meth:`~Flask.register_blueprint`. - - .. versionchanged:: 2.3 - Nested blueprints now correctly apply subdomains. - - .. versionchanged:: 2.1 - Registering the same blueprint with the same name multiple - times is an error. - - .. versionchanged:: 2.0.1 - Nested blueprints are registered with their dotted name. - This allows different blueprints with the same name to be - nested at different locations. - - .. versionchanged:: 2.0.1 - The ``name`` option can be used to change the (pre-dotted) - name the blueprint is registered with. This allows the same - blueprint to be registered multiple times with unique names - for ``url_for``. - """ - name_prefix = options.get("name_prefix", "") - self_name = options.get("name", self.name) - name = f"{name_prefix}.{self_name}".lstrip(".") - - if name in app.blueprints: - bp_desc = "this" if app.blueprints[name] is self else "a different" - existing_at = f" '{name}'" if self_name != name else "" - - raise ValueError( - f"The name '{self_name}' is already registered for" - f" {bp_desc} blueprint{existing_at}. Use 'name=' to" - f" provide a unique name." - ) - - first_bp_registration = not any(bp is self for bp in app.blueprints.values()) - first_name_registration = name not in app.blueprints - - app.blueprints[name] = self - self._got_registered_once = True - state = self.make_setup_state(app, options, first_bp_registration) - - if self.has_static_folder: - state.add_url_rule( - f"{self.static_url_path}/", - view_func=self.send_static_file, # type: ignore[attr-defined] - endpoint="static", - ) - - # Merge blueprint data into parent. - if first_bp_registration or first_name_registration: - self._merge_blueprint_funcs(app, name) - - for deferred in self.deferred_functions: - deferred(state) - - cli_resolved_group = options.get("cli_group", self.cli_group) - - if self.cli.commands: - if cli_resolved_group is None: - app.cli.commands.update(self.cli.commands) - elif cli_resolved_group is _sentinel: - self.cli.name = name - app.cli.add_command(self.cli) - else: - self.cli.name = cli_resolved_group - app.cli.add_command(self.cli) - - for blueprint, bp_options in self._blueprints: - bp_options = bp_options.copy() - bp_url_prefix = bp_options.get("url_prefix") - bp_subdomain = bp_options.get("subdomain") - - if bp_subdomain is None: - bp_subdomain = blueprint.subdomain - - if state.subdomain is not None and bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain + "." 
+ state.subdomain - elif bp_subdomain is not None: - bp_options["subdomain"] = bp_subdomain - elif state.subdomain is not None: - bp_options["subdomain"] = state.subdomain - - if bp_url_prefix is None: - bp_url_prefix = blueprint.url_prefix - - if state.url_prefix is not None and bp_url_prefix is not None: - bp_options["url_prefix"] = ( - state.url_prefix.rstrip("/") + "/" + bp_url_prefix.lstrip("/") - ) - elif bp_url_prefix is not None: - bp_options["url_prefix"] = bp_url_prefix - elif state.url_prefix is not None: - bp_options["url_prefix"] = state.url_prefix - - bp_options["name_prefix"] = name - blueprint.register(app, bp_options) - - def _merge_blueprint_funcs(self, app: App, name: str) -> None: - def extend( - bp_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - parent_dict: dict[ft.AppOrBlueprintKey, list[t.Any]], - ) -> None: - for key, values in bp_dict.items(): - key = name if key is None else f"{name}.{key}" - parent_dict[key].extend(values) - - for key, value in self.error_handler_spec.items(): - key = name if key is None else f"{name}.{key}" - value = defaultdict( - dict, - { - code: {exc_class: func for exc_class, func in code_values.items()} - for code, code_values in value.items() - }, - ) - app.error_handler_spec[key] = value - - for endpoint, func in self.view_functions.items(): - app.view_functions[endpoint] = func - - extend(self.before_request_funcs, app.before_request_funcs) - extend(self.after_request_funcs, app.after_request_funcs) - extend( - self.teardown_request_funcs, - app.teardown_request_funcs, - ) - extend(self.url_default_functions, app.url_default_functions) - extend(self.url_value_preprocessors, app.url_value_preprocessors) - extend(self.template_context_processors, app.template_context_processors) - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a URL rule with the blueprint. See :meth:`.Flask.add_url_rule` for - full documentation. - - The URL rule is prefixed with the blueprint's URL prefix. The endpoint name, - used with :func:`url_for`, is prefixed with the blueprint's name. - """ - if endpoint and "." in endpoint: - raise ValueError("'endpoint' may not contain a dot '.' character.") - - if view_func and hasattr(view_func, "__name__") and "." in view_func.__name__: - raise ValueError("'view_func' name may not contain a dot '.' character.") - - self.record( - lambda s: s.add_url_rule( - rule, - endpoint, - view_func, - provide_automatic_options=provide_automatic_options, - **options, - ) - ) - - @setupmethod - def app_template_filter( - self, name: str | None = None - ) -> t.Callable[[T_template_filter], T_template_filter]: - """Register a template filter, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_filter`. - - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def decorator(f: T_template_filter) -> T_template_filter: - self.add_app_template_filter(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_filter( - self, f: ft.TemplateFilterCallable, name: str | None = None - ) -> None: - """Register a template filter, available in any template rendered by the - application. Works like the :meth:`app_template_filter` decorator. Equivalent to - :meth:`.Flask.add_template_filter`. 
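-
-        A minimal sketch (the blueprint variable ``bp`` is an assumed
-        name)::
-
-            def reverse(s):
-                return s[::-1]
-
-            bp.add_app_template_filter(reverse)
-            # Once registered, usable in any template: {{ "abc"|reverse }}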
- - :param name: the optional name of the filter, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.filters[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_test( - self, name: str | None = None - ) -> t.Callable[[T_template_test], T_template_test]: - """Register a template test, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def decorator(f: T_template_test) -> T_template_test: - self.add_app_template_test(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_test( - self, f: ft.TemplateTestCallable, name: str | None = None - ) -> None: - """Register a template test, available in any template rendered by the - application. Works like the :meth:`app_template_test` decorator. Equivalent to - :meth:`.Flask.add_template_test`. - - .. versionadded:: 0.10 - - :param name: the optional name of the test, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.tests[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def app_template_global( - self, name: str | None = None - ) -> t.Callable[[T_template_global], T_template_global]: - """Register a template global, available in any template rendered by the - application. Equivalent to :meth:`.Flask.template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def decorator(f: T_template_global) -> T_template_global: - self.add_app_template_global(f, name=name) - return f - - return decorator - - @setupmethod - def add_app_template_global( - self, f: ft.TemplateGlobalCallable, name: str | None = None - ) -> None: - """Register a template global, available in any template rendered by the - application. Works like the :meth:`app_template_global` decorator. Equivalent to - :meth:`.Flask.add_template_global`. - - .. versionadded:: 0.10 - - :param name: the optional name of the global, otherwise the - function name will be used. - """ - - def register_template(state: BlueprintSetupState) -> None: - state.app.jinja_env.globals[name or f.__name__] = f - - self.record_once(register_template) - - @setupmethod - def before_app_request(self, f: T_before_request) -> T_before_request: - """Like :meth:`before_request`, but before every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.before_request`. - """ - self.record_once( - lambda s: s.app.before_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def after_app_request(self, f: T_after_request) -> T_after_request: - """Like :meth:`after_request`, but after every request, not only those handled - by the blueprint. Equivalent to :meth:`.Flask.after_request`. - """ - self.record_once( - lambda s: s.app.after_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def teardown_app_request(self, f: T_teardown) -> T_teardown: - """Like :meth:`teardown_request`, but after every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.teardown_request`. 
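-
-        A minimal sketch (``bp`` and the ``g`` attribute name are assumed,
-        not part of this API)::
-
-            @bp.teardown_app_request
-            def close_db_connection(exc):
-                conn = g.pop("db_conn", None)
-                if conn is not None:
-                    conn.close()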
- """ - self.record_once( - lambda s: s.app.teardown_request_funcs.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_context_processor( - self, f: T_template_context_processor - ) -> T_template_context_processor: - """Like :meth:`context_processor`, but for templates rendered by every view, not - only by the blueprint. Equivalent to :meth:`.Flask.context_processor`. - """ - self.record_once( - lambda s: s.app.template_context_processors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_errorhandler( - self, code: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Like :meth:`errorhandler`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.errorhandler`. - """ - - def decorator(f: T_error_handler) -> T_error_handler: - def from_blueprint(state: BlueprintSetupState) -> None: - state.app.errorhandler(code)(f) - - self.record_once(from_blueprint) - return f - - return decorator - - @setupmethod - def app_url_value_preprocessor( - self, f: T_url_value_preprocessor - ) -> T_url_value_preprocessor: - """Like :meth:`url_value_preprocessor`, but for every request, not only those - handled by the blueprint. Equivalent to :meth:`.Flask.url_value_preprocessor`. - """ - self.record_once( - lambda s: s.app.url_value_preprocessors.setdefault(None, []).append(f) - ) - return f - - @setupmethod - def app_url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Like :meth:`url_defaults`, but for every request, not only those handled by - the blueprint. Equivalent to :meth:`.Flask.url_defaults`. - """ - self.record_once( - lambda s: s.app.url_default_functions.setdefault(None, []).append(f) - ) - return f diff --git a/myenv/Lib/site-packages/flask/sansio/scaffold.py b/myenv/Lib/site-packages/flask/sansio/scaffold.py deleted file mode 100644 index 69e33a0..0000000 --- a/myenv/Lib/site-packages/flask/sansio/scaffold.py +++ /dev/null @@ -1,801 +0,0 @@ -from __future__ import annotations - -import importlib.util -import os -import pathlib -import sys -import typing as t -from collections import defaultdict -from functools import update_wrapper - -from jinja2 import BaseLoader -from jinja2 import FileSystemLoader -from werkzeug.exceptions import default_exceptions -from werkzeug.exceptions import HTTPException -from werkzeug.utils import cached_property - -from .. 
import typing as ft -from ..helpers import get_root_path -from ..templating import _default_template_ctx_processor - -if t.TYPE_CHECKING: # pragma: no cover - from click import Group - -# a singleton sentinel value for parameter defaults -_sentinel = object() - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) -T_after_request = t.TypeVar("T_after_request", bound=ft.AfterRequestCallable[t.Any]) -T_before_request = t.TypeVar("T_before_request", bound=ft.BeforeRequestCallable) -T_error_handler = t.TypeVar("T_error_handler", bound=ft.ErrorHandlerCallable) -T_teardown = t.TypeVar("T_teardown", bound=ft.TeardownCallable) -T_template_context_processor = t.TypeVar( - "T_template_context_processor", bound=ft.TemplateContextProcessorCallable -) -T_url_defaults = t.TypeVar("T_url_defaults", bound=ft.URLDefaultCallable) -T_url_value_preprocessor = t.TypeVar( - "T_url_value_preprocessor", bound=ft.URLValuePreprocessorCallable -) -T_route = t.TypeVar("T_route", bound=ft.RouteCallable) - - -def setupmethod(f: F) -> F: - f_name = f.__name__ - - def wrapper_func(self: Scaffold, *args: t.Any, **kwargs: t.Any) -> t.Any: - self._check_setup_finished(f_name) - return f(self, *args, **kwargs) - - return t.cast(F, update_wrapper(wrapper_func, f)) - - -class Scaffold: - """Common behavior shared between :class:`~flask.Flask` and - :class:`~flask.blueprints.Blueprint`. - - :param import_name: The import name of the module where this object - is defined. Usually :attr:`__name__` should be used. - :param static_folder: Path to a folder of static files to serve. - If this is set, a static route will be added. - :param static_url_path: URL prefix for the static route. - :param template_folder: Path to a folder containing template files. - for rendering. If this is set, a Jinja loader will be added. - :param root_path: The path that static, template, and resource files - are relative to. Typically not set, it is discovered based on - the ``import_name``. - - .. versionadded:: 2.0 - """ - - cli: Group - name: str - _static_folder: str | None = None - _static_url_path: str | None = None - - def __init__( - self, - import_name: str, - static_folder: str | os.PathLike[str] | None = None, - static_url_path: str | None = None, - template_folder: str | os.PathLike[str] | None = None, - root_path: str | None = None, - ): - #: The name of the package or module that this object belongs - #: to. Do not change this once it is set by the constructor. - self.import_name = import_name - - self.static_folder = static_folder # type: ignore - self.static_url_path = static_url_path - - #: The path to the templates folder, relative to - #: :attr:`root_path`, to add to the template loader. ``None`` if - #: templates should not be added. - self.template_folder = template_folder - - if root_path is None: - root_path = get_root_path(self.import_name) - - #: Absolute path to the package on the filesystem. Used to look - #: up resources contained in the package. - self.root_path = root_path - - #: A dictionary mapping endpoint names to view functions. - #: - #: To register a view function, use the :meth:`route` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.view_functions: dict[str, ft.RouteCallable] = {} - - #: A data structure of registered error handlers, in the format - #: ``{scope: {code: {class: handler}}}``. The ``scope`` key is - #: the name of a blueprint the handlers are active for, or - #: ``None`` for all requests. 
The ``code`` key is the HTTP - #: status code for ``HTTPException``, or ``None`` for - #: other exceptions. The innermost dictionary maps exception - #: classes to handler functions. - #: - #: To register an error handler, use the :meth:`errorhandler` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.error_handler_spec: dict[ - ft.AppOrBlueprintKey, - dict[int | None, dict[type[Exception], ft.ErrorHandlerCallable]], - ] = defaultdict(lambda: defaultdict(dict)) - - #: A data structure of functions to call at the beginning of - #: each request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`before_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.before_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.BeforeRequestCallable] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request, in the format ``{scope: [functions]}``. The - #: ``scope`` key is the name of a blueprint the functions are - #: active for, or ``None`` for all requests. - #: - #: To register a function, use the :meth:`after_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.after_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.AfterRequestCallable[t.Any]] - ] = defaultdict(list) - - #: A data structure of functions to call at the end of each - #: request even if an exception is raised, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`teardown_request` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.teardown_request_funcs: dict[ - ft.AppOrBlueprintKey, list[ft.TeardownCallable] - ] = defaultdict(list) - - #: A data structure of functions to call to pass extra context - #: values when rendering templates, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`context_processor` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.template_context_processors: dict[ - ft.AppOrBlueprintKey, list[ft.TemplateContextProcessorCallable] - ] = defaultdict(list, {None: [_default_template_ctx_processor]}) - - #: A data structure of functions to call to modify the keyword - #: arguments passed to the view function, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the - #: :meth:`url_value_preprocessor` decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. 
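-        #:
-        #: A sketch of registration (the ``lang_code`` URL argument is
-        #: illustrative)::
-        #:
-        #:     @app.url_value_preprocessor
-        #:     def pull_lang_code(endpoint, values):
-        #:         if values is not None:
-        #:             g.lang_code = values.pop("lang_code", None)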
- self.url_value_preprocessors: dict[ - ft.AppOrBlueprintKey, - list[ft.URLValuePreprocessorCallable], - ] = defaultdict(list) - - #: A data structure of functions to call to modify the keyword - #: arguments when generating URLs, in the format - #: ``{scope: [functions]}``. The ``scope`` key is the name of a - #: blueprint the functions are active for, or ``None`` for all - #: requests. - #: - #: To register a function, use the :meth:`url_defaults` - #: decorator. - #: - #: This data structure is internal. It should not be modified - #: directly and its format may change at any time. - self.url_default_functions: dict[ - ft.AppOrBlueprintKey, list[ft.URLDefaultCallable] - ] = defaultdict(list) - - def __repr__(self) -> str: - return f"<{type(self).__name__} {self.name!r}>" - - def _check_setup_finished(self, f_name: str) -> None: - raise NotImplementedError - - @property - def static_folder(self) -> str | None: - """The absolute path to the configured static folder. ``None`` - if no static folder is set. - """ - if self._static_folder is not None: - return os.path.join(self.root_path, self._static_folder) - else: - return None - - @static_folder.setter - def static_folder(self, value: str | os.PathLike[str] | None) -> None: - if value is not None: - value = os.fspath(value).rstrip(r"\/") - - self._static_folder = value - - @property - def has_static_folder(self) -> bool: - """``True`` if :attr:`static_folder` is set. - - .. versionadded:: 0.5 - """ - return self.static_folder is not None - - @property - def static_url_path(self) -> str | None: - """The URL prefix that the static route will be accessible from. - - If it was not configured during init, it is derived from - :attr:`static_folder`. - """ - if self._static_url_path is not None: - return self._static_url_path - - if self.static_folder is not None: - basename = os.path.basename(self.static_folder) - return f"/{basename}".rstrip("/") - - return None - - @static_url_path.setter - def static_url_path(self, value: str | None) -> None: - if value is not None: - value = value.rstrip("/") - - self._static_url_path = value - - @cached_property - def jinja_loader(self) -> BaseLoader | None: - """The Jinja loader for this object's templates. By default this - is a class :class:`jinja2.loaders.FileSystemLoader` to - :attr:`template_folder` if it is set. - - .. versionadded:: 0.5 - """ - if self.template_folder is not None: - return FileSystemLoader(os.path.join(self.root_path, self.template_folder)) - else: - return None - - def _method_route( - self, - method: str, - rule: str, - options: dict[str, t.Any], - ) -> t.Callable[[T_route], T_route]: - if "methods" in options: - raise TypeError("Use the 'route' decorator to use the 'methods' argument.") - - return self.route(rule, methods=[method], **options) - - @setupmethod - def get(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["GET"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("GET", rule, options) - - @setupmethod - def post(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["POST"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("POST", rule, options) - - @setupmethod - def put(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PUT"]``. - - .. 
versionadded:: 2.0 - """ - return self._method_route("PUT", rule, options) - - @setupmethod - def delete(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["DELETE"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("DELETE", rule, options) - - @setupmethod - def patch(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Shortcut for :meth:`route` with ``methods=["PATCH"]``. - - .. versionadded:: 2.0 - """ - return self._method_route("PATCH", rule, options) - - @setupmethod - def route(self, rule: str, **options: t.Any) -> t.Callable[[T_route], T_route]: - """Decorate a view function to register it with the given URL - rule and options. Calls :meth:`add_url_rule`, which has more - details about the implementation. - - .. code-block:: python - - @app.route("/") - def index(): - return "Hello, World!" - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` and - ``OPTIONS`` are added automatically. - - :param rule: The URL rule string. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. - """ - - def decorator(f: T_route) -> T_route: - endpoint = options.pop("endpoint", None) - self.add_url_rule(rule, endpoint, f, **options) - return f - - return decorator - - @setupmethod - def add_url_rule( - self, - rule: str, - endpoint: str | None = None, - view_func: ft.RouteCallable | None = None, - provide_automatic_options: bool | None = None, - **options: t.Any, - ) -> None: - """Register a rule for routing incoming requests and building - URLs. The :meth:`route` decorator is a shortcut to call this - with the ``view_func`` argument. These are equivalent: - - .. code-block:: python - - @app.route("/") - def index(): - ... - - .. code-block:: python - - def index(): - ... - - app.add_url_rule("/", view_func=index) - - See :ref:`url-route-registrations`. - - The endpoint name for the route defaults to the name of the view - function if the ``endpoint`` parameter isn't passed. An error - will be raised if a function has already been registered for the - endpoint. - - The ``methods`` parameter defaults to ``["GET"]``. ``HEAD`` is - always added automatically, and ``OPTIONS`` is added - automatically by default. - - ``view_func`` does not necessarily need to be passed, but if the - rule should participate in routing an endpoint name must be - associated with a view function at some point with the - :meth:`endpoint` decorator. - - .. code-block:: python - - app.add_url_rule("/", endpoint="index") - - @app.endpoint("index") - def index(): - ... - - If ``view_func`` has a ``required_methods`` attribute, those - methods are added to the passed and automatic methods. If it - has a ``provide_automatic_methods`` attribute, it is used as the - default if the parameter is not passed. - - :param rule: The URL rule string. - :param endpoint: The endpoint name to associate with the rule - and view function. Used when routing and building URLs. - Defaults to ``view_func.__name__``. - :param view_func: The view function to associate with the - endpoint name. - :param provide_automatic_options: Add the ``OPTIONS`` method and - respond to ``OPTIONS`` requests automatically. - :param options: Extra options passed to the - :class:`~werkzeug.routing.Rule` object. 
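A further illustrative sketch (assuming an existing ``app``): register a ``POST``-only rule with an explicit endpoint, which ``url_for`` can then build against.

.. code-block:: python

    def create_user():
        ...

    app.add_url_rule(
        "/users",
        endpoint="create_user",
        view_func=create_user,
        methods=["POST"],
    )

    # url_for("create_user") now returns "/users".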
- """ - raise NotImplementedError - - @setupmethod - def endpoint(self, endpoint: str) -> t.Callable[[F], F]: - """Decorate a view function to register it for the given - endpoint. Used if a rule is added without a ``view_func`` with - :meth:`add_url_rule`. - - .. code-block:: python - - app.add_url_rule("/ex", endpoint="example") - - @app.endpoint("example") - def example(): - ... - - :param endpoint: The endpoint name to associate with the view - function. - """ - - def decorator(f: F) -> F: - self.view_functions[endpoint] = f - return f - - return decorator - - @setupmethod - def before_request(self, f: T_before_request) -> T_before_request: - """Register a function to run before each request. - - For example, this can be used to open a database connection, or - to load the logged in user from the session. - - .. code-block:: python - - @app.before_request - def load_user(): - if "user_id" in session: - g.user = db.session.get(session["user_id"]) - - The function will be called without any arguments. If it returns - a non-``None`` value, the value is handled as if it was the - return value from the view, and further request handling is - stopped. - - This is available on both app and blueprint objects. When used on an app, this - executes before every request. When used on a blueprint, this executes before - every request that the blueprint handles. To register with a blueprint and - execute before every request, use :meth:`.Blueprint.before_app_request`. - """ - self.before_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def after_request(self, f: T_after_request) -> T_after_request: - """Register a function to run after each request to this object. - - The function is called with the response object, and must return - a response object. This allows the functions to modify or - replace the response before it is sent. - - If a function raises an exception, any remaining - ``after_request`` functions will not be called. Therefore, this - should not be used for actions that must execute, such as to - close resources. Use :meth:`teardown_request` for that. - - This is available on both app and blueprint objects. When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.after_app_request`. - """ - self.after_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def teardown_request(self, f: T_teardown) -> T_teardown: - """Register a function to be called when the request context is - popped. Typically this happens at the end of each request, but - contexts may be pushed manually as well during testing. - - .. code-block:: python - - with app.test_request_context(): - ... - - When the ``with`` block exits (or ``ctx.pop()`` is called), the - teardown functions are called just before the request context is - made inactive. - - When a teardown function was called because of an unhandled - exception it will be passed an error object. If an - :meth:`errorhandler` is registered, it will handle the exception - and the teardown will not receive it. - - Teardown functions must avoid raising exceptions. If they - execute code that might fail they must surround that code with a - ``try``/``except`` block and log any errors. - - The return values of teardown functions are ignored. - - This is available on both app and blueprint objects. 
When used on an app, this - executes after every request. When used on a blueprint, this executes after - every request that the blueprint handles. To register with a blueprint and - execute after every request, use :meth:`.Blueprint.teardown_app_request`. - """ - self.teardown_request_funcs.setdefault(None, []).append(f) - return f - - @setupmethod - def context_processor( - self, - f: T_template_context_processor, - ) -> T_template_context_processor: - """Registers a template context processor function. These functions run before - rendering a template. The keys of the returned dict are added as variables - available in the template. - - This is available on both app and blueprint objects. When used on an app, this - is called for every rendered template. When used on a blueprint, this is called - for templates rendered from the blueprint's views. To register with a blueprint - and affect every template, use :meth:`.Blueprint.app_context_processor`. - """ - self.template_context_processors[None].append(f) - return f - - @setupmethod - def url_value_preprocessor( - self, - f: T_url_value_preprocessor, - ) -> T_url_value_preprocessor: - """Register a URL value preprocessor function for all view - functions in the application. These functions will be called before the - :meth:`before_request` functions. - - The function can modify the values captured from the matched url before - they are passed to the view. For example, this can be used to pop a - common language code value and place it in ``g`` rather than pass it to - every view. - - The function is passed the endpoint name and values dict. The return - value is ignored. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_value_preprocessor`. - """ - self.url_value_preprocessors[None].append(f) - return f - - @setupmethod - def url_defaults(self, f: T_url_defaults) -> T_url_defaults: - """Callback function for URL defaults for all view functions of the - application. It's called with the endpoint and values and should - update the values passed in place. - - This is available on both app and blueprint objects. When used on an app, this - is called for every request. When used on a blueprint, this is called for - requests that the blueprint handles. To register with a blueprint and affect - every request, use :meth:`.Blueprint.app_url_defaults`. - """ - self.url_default_functions[None].append(f) - return f - - @setupmethod - def errorhandler( - self, code_or_exception: type[Exception] | int - ) -> t.Callable[[T_error_handler], T_error_handler]: - """Register a function to handle errors by code or exception class. - - A decorator that is used to register a function given an - error code. Example:: - - @app.errorhandler(404) - def page_not_found(error): - return 'This page does not exist', 404 - - You can also register handlers for arbitrary exceptions:: - - @app.errorhandler(DatabaseError) - def special_exception_handler(error): - return 'Database connection failed', 500 - - This is available on both app and blueprint objects. When used on an app, this - can handle errors from every request. When used on a blueprint, this can handle - errors from requests that the blueprint handles. To register with a blueprint - and affect every request, use :meth:`.Blueprint.app_errorhandler`. - - .. 
versionadded:: 0.7 - Use :meth:`register_error_handler` instead of modifying - :attr:`error_handler_spec` directly, for application wide error - handlers. - - .. versionadded:: 0.7 - One can now additionally also register custom exception types - that do not necessarily have to be a subclass of the - :class:`~werkzeug.exceptions.HTTPException` class. - - :param code_or_exception: the code as integer for the handler, or - an arbitrary exception - """ - - def decorator(f: T_error_handler) -> T_error_handler: - self.register_error_handler(code_or_exception, f) - return f - - return decorator - - @setupmethod - def register_error_handler( - self, - code_or_exception: type[Exception] | int, - f: ft.ErrorHandlerCallable, - ) -> None: - """Alternative error attach function to the :meth:`errorhandler` - decorator that is more straightforward to use for non decorator - usage. - - .. versionadded:: 0.7 - """ - exc_class, code = self._get_exc_class_and_code(code_or_exception) - self.error_handler_spec[None][code][exc_class] = f - - @staticmethod - def _get_exc_class_and_code( - exc_class_or_code: type[Exception] | int, - ) -> tuple[type[Exception], int | None]: - """Get the exception class being handled. For HTTP status codes - or ``HTTPException`` subclasses, return both the exception and - status code. - - :param exc_class_or_code: Any exception class, or an HTTP status - code as an integer. - """ - exc_class: type[Exception] - - if isinstance(exc_class_or_code, int): - try: - exc_class = default_exceptions[exc_class_or_code] - except KeyError: - raise ValueError( - f"'{exc_class_or_code}' is not a recognized HTTP" - " error code. Use a subclass of HTTPException with" - " that code instead." - ) from None - else: - exc_class = exc_class_or_code - - if isinstance(exc_class, Exception): - raise TypeError( - f"{exc_class!r} is an instance, not a class. Handlers" - " can only be registered for Exception classes or HTTP" - " error codes." - ) - - if not issubclass(exc_class, Exception): - raise ValueError( - f"'{exc_class.__name__}' is not a subclass of Exception." - " Handlers can only be registered for Exception classes" - " or HTTP error codes." - ) - - if issubclass(exc_class, HTTPException): - return exc_class, exc_class.code - else: - return exc_class, None - - -def _endpoint_from_view_func(view_func: ft.RouteCallable) -> str: - """Internal helper that returns the default endpoint for a given - function. This always is the function name. - """ - assert view_func is not None, "expected view func if endpoint is not provided." 
- return view_func.__name__ - - -def _path_is_relative_to(path: pathlib.PurePath, base: str) -> bool: - # Path.is_relative_to doesn't exist until Python 3.9 - try: - path.relative_to(base) - return True - except ValueError: - return False - - -def _find_package_path(import_name: str) -> str: - """Find the path that contains the package or module.""" - root_mod_name, _, _ = import_name.partition(".") - - try: - root_spec = importlib.util.find_spec(root_mod_name) - - if root_spec is None: - raise ValueError("not found") - except (ImportError, ValueError): - # ImportError: the machinery told us it does not exist - # ValueError: - # - the module name was invalid - # - the module name is __main__ - # - we raised `ValueError` due to `root_spec` being `None` - return os.getcwd() - - if root_spec.submodule_search_locations: - if root_spec.origin is None or root_spec.origin == "namespace": - # namespace package - package_spec = importlib.util.find_spec(import_name) - - if package_spec is not None and package_spec.submodule_search_locations: - # Pick the path in the namespace that contains the submodule. - package_path = pathlib.Path( - os.path.commonpath(package_spec.submodule_search_locations) - ) - search_location = next( - location - for location in root_spec.submodule_search_locations - if _path_is_relative_to(package_path, location) - ) - else: - # Pick the first path. - search_location = root_spec.submodule_search_locations[0] - - return os.path.dirname(search_location) - else: - # package with __init__.py - return os.path.dirname(os.path.dirname(root_spec.origin)) - else: - # module - return os.path.dirname(root_spec.origin) # type: ignore[type-var, return-value] - - -def find_package(import_name: str) -> tuple[str | None, str]: - """Find the prefix that a package is installed under, and the path - that it would be imported from. - - The prefix is the directory containing the standard directory - hierarchy (lib, bin, etc.). If the package is not installed to the - system (:attr:`sys.prefix`) or a virtualenv (``site-packages``), - ``None`` is returned. - - The path is the entry in :attr:`sys.path` that contains the package - for import. If the package is not installed, it's assumed that the - package was imported from the current working directory. 
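Purely illustrative, since the returned paths depend on the environment and installer:

.. code-block:: python

    prefix, path = find_package("flask")
    # In a virtualenv this might look like:
    #   prefix -> "/home/user/project/.venv"
    #   path   -> "/home/user/project/.venv/lib/python3.12/site-packages"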
- """ - package_path = _find_package_path(import_name) - py_prefix = os.path.abspath(sys.prefix) - - # installed to the system - if _path_is_relative_to(pathlib.PurePath(package_path), py_prefix): - return py_prefix, package_path - - site_parent, site_folder = os.path.split(package_path) - - # installed to a virtualenv - if site_folder.lower() == "site-packages": - parent, folder = os.path.split(site_parent) - - # Windows (prefix/lib/site-packages) - if folder.lower() == "lib": - return parent, package_path - - # Unix (prefix/lib/pythonX.Y/site-packages) - if os.path.basename(parent).lower() == "lib": - return os.path.dirname(parent), package_path - - # something else (prefix/site-packages) - return site_parent, package_path - - # not installed - return None, package_path diff --git a/myenv/Lib/site-packages/flask/sessions.py b/myenv/Lib/site-packages/flask/sessions.py deleted file mode 100644 index ee19ad6..0000000 --- a/myenv/Lib/site-packages/flask/sessions.py +++ /dev/null @@ -1,379 +0,0 @@ -from __future__ import annotations - -import hashlib -import typing as t -from collections.abc import MutableMapping -from datetime import datetime -from datetime import timezone - -from itsdangerous import BadSignature -from itsdangerous import URLSafeTimedSerializer -from werkzeug.datastructures import CallbackDict - -from .json.tag import TaggedJSONSerializer - -if t.TYPE_CHECKING: # pragma: no cover - import typing_extensions as te - - from .app import Flask - from .wrappers import Request - from .wrappers import Response - - -# TODO generic when Python > 3.8 -class SessionMixin(MutableMapping): # type: ignore[type-arg] - """Expands a basic dictionary with session attributes.""" - - @property - def permanent(self) -> bool: - """This reflects the ``'_permanent'`` key in the dict.""" - return self.get("_permanent", False) - - @permanent.setter - def permanent(self, value: bool) -> None: - self["_permanent"] = bool(value) - - #: Some implementations can detect whether a session is newly - #: created, but that is not guaranteed. Use with caution. The mixin - # default is hard-coded ``False``. - new = False - - #: Some implementations can detect changes to the session and set - #: this when that happens. The mixin default is hard coded to - #: ``True``. - modified = True - - #: Some implementations can detect when session data is read or - #: written and set this when that happens. The mixin default is hard - #: coded to ``True``. - accessed = True - - -# TODO generic when Python > 3.8 -class SecureCookieSession(CallbackDict, SessionMixin): # type: ignore[type-arg] - """Base class for sessions based on signed cookies. - - This session backend will set the :attr:`modified` and - :attr:`accessed` attributes. It cannot reliably track whether a - session is new (vs. empty), so :attr:`new` remains hard coded to - ``False``. - """ - - #: When data is changed, this is set to ``True``. Only the session - #: dictionary itself is tracked; if the session contains mutable - #: data (for example a nested dict) then this must be set to - #: ``True`` manually when modifying that data. The session cookie - #: will only be written to the response if this is ``True``. - modified = False - - #: When data is read or written, this is set to ``True``. Used by - # :class:`.SecureCookieSessionInterface` to add a ``Vary: Cookie`` - #: header, which allows caching proxies to cache different pages for - #: different users. 
- accessed = False - - def __init__(self, initial: t.Any = None) -> None: - def on_update(self: te.Self) -> None: - self.modified = True - self.accessed = True - - super().__init__(initial, on_update) - - def __getitem__(self, key: str) -> t.Any: - self.accessed = True - return super().__getitem__(key) - - def get(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().get(key, default) - - def setdefault(self, key: str, default: t.Any = None) -> t.Any: - self.accessed = True - return super().setdefault(key, default) - - -class NullSession(SecureCookieSession): - """Class used to generate nicer error messages if sessions are not - available. Will still allow read-only access to the empty session - but fail on setting. - """ - - def _fail(self, *args: t.Any, **kwargs: t.Any) -> t.NoReturn: - raise RuntimeError( - "The session is unavailable because no secret " - "key was set. Set the secret_key on the " - "application to something unique and secret." - ) - - __setitem__ = __delitem__ = clear = pop = popitem = update = setdefault = _fail # type: ignore # noqa: B950 - del _fail - - -class SessionInterface: - """The basic interface you have to implement in order to replace the - default session interface which uses werkzeug's securecookie - implementation. The only methods you have to implement are - :meth:`open_session` and :meth:`save_session`, the others have - useful defaults which you don't need to change. - - The session object returned by the :meth:`open_session` method has to - provide a dictionary like interface plus the properties and methods - from the :class:`SessionMixin`. We recommend just subclassing a dict - and adding that mixin:: - - class Session(dict, SessionMixin): - pass - - If :meth:`open_session` returns ``None`` Flask will call into - :meth:`make_null_session` to create a session that acts as replacement - if the session support cannot work because some requirement is not - fulfilled. The default :class:`NullSession` class that is created - will complain that the secret key was not set. - - To replace the session interface on an application all you have to do - is to assign :attr:`flask.Flask.session_interface`:: - - app = Flask(__name__) - app.session_interface = MySessionInterface() - - Multiple requests with the same session may be sent and handled - concurrently. When implementing a new session interface, consider - whether reads or writes to the backing store must be synchronized. - There is no guarantee on the order in which the session for each - request is opened or saved, it will occur in the order that requests - begin and end processing. - - .. versionadded:: 0.8 - """ - - #: :meth:`make_null_session` will look here for the class that should - #: be created when a null session is requested. Likewise the - #: :meth:`is_null_session` method will perform a typecheck against - #: this type. - null_session_class = NullSession - - #: A flag that indicates if the session interface is pickle based. - #: This can be used by Flask extensions to make a decision in regards - #: to how to deal with the session object. - #: - #: .. versionadded:: 0.10 - pickle_based = False - - def make_null_session(self, app: Flask) -> NullSession: - """Creates a null session which acts as a replacement object if the - real session support could not be loaded due to a configuration - error. 
This mainly aids the user experience because the job of the - null session is to still support lookup without complaining but - modifications are answered with a helpful error message of what - failed. - - This creates an instance of :attr:`null_session_class` by default. - """ - return self.null_session_class() - - def is_null_session(self, obj: object) -> bool: - """Checks if a given object is a null session. Null sessions are - not asked to be saved. - - This checks if the object is an instance of :attr:`null_session_class` - by default. - """ - return isinstance(obj, self.null_session_class) - - def get_cookie_name(self, app: Flask) -> str: - """The name of the session cookie. Uses``app.config["SESSION_COOKIE_NAME"]``.""" - return app.config["SESSION_COOKIE_NAME"] # type: ignore[no-any-return] - - def get_cookie_domain(self, app: Flask) -> str | None: - """The value of the ``Domain`` parameter on the session cookie. If not set, - browsers will only send the cookie to the exact domain it was set from. - Otherwise, they will send it to any subdomain of the given value as well. - - Uses the :data:`SESSION_COOKIE_DOMAIN` config. - - .. versionchanged:: 2.3 - Not set by default, does not fall back to ``SERVER_NAME``. - """ - return app.config["SESSION_COOKIE_DOMAIN"] # type: ignore[no-any-return] - - def get_cookie_path(self, app: Flask) -> str: - """Returns the path for which the cookie should be valid. The - default implementation uses the value from the ``SESSION_COOKIE_PATH`` - config var if it's set, and falls back to ``APPLICATION_ROOT`` or - uses ``/`` if it's ``None``. - """ - return app.config["SESSION_COOKIE_PATH"] or app.config["APPLICATION_ROOT"] # type: ignore[no-any-return] - - def get_cookie_httponly(self, app: Flask) -> bool: - """Returns True if the session cookie should be httponly. This - currently just returns the value of the ``SESSION_COOKIE_HTTPONLY`` - config var. - """ - return app.config["SESSION_COOKIE_HTTPONLY"] # type: ignore[no-any-return] - - def get_cookie_secure(self, app: Flask) -> bool: - """Returns True if the cookie should be secure. This currently - just returns the value of the ``SESSION_COOKIE_SECURE`` setting. - """ - return app.config["SESSION_COOKIE_SECURE"] # type: ignore[no-any-return] - - def get_cookie_samesite(self, app: Flask) -> str | None: - """Return ``'Strict'`` or ``'Lax'`` if the cookie should use the - ``SameSite`` attribute. This currently just returns the value of - the :data:`SESSION_COOKIE_SAMESITE` setting. - """ - return app.config["SESSION_COOKIE_SAMESITE"] # type: ignore[no-any-return] - - def get_expiration_time(self, app: Flask, session: SessionMixin) -> datetime | None: - """A helper method that returns an expiration date for the session - or ``None`` if the session is linked to the browser session. The - default implementation returns now + the permanent session - lifetime configured on the application. - """ - if session.permanent: - return datetime.now(timezone.utc) + app.permanent_session_lifetime - return None - - def should_set_cookie(self, app: Flask, session: SessionMixin) -> bool: - """Used by session backends to determine if a ``Set-Cookie`` header - should be set for this session cookie for this response. If the session - has been modified, the cookie is set. If the session is permanent and - the ``SESSION_REFRESH_EACH_REQUEST`` config is true, the cookie is - always set. - - This check is usually skipped if the session was deleted. - - .. 
versionadded:: 0.11 - """ - - return session.modified or ( - session.permanent and app.config["SESSION_REFRESH_EACH_REQUEST"] - ) - - def open_session(self, app: Flask, request: Request) -> SessionMixin | None: - """This is called at the beginning of each request, after - pushing the request context, before matching the URL. - - This must return an object which implements a dictionary-like - interface as well as the :class:`SessionMixin` interface. - - This will return ``None`` to indicate that loading failed in - some way that is not immediately an error. The request - context will fall back to using :meth:`make_null_session` - in this case. - """ - raise NotImplementedError() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - """This is called at the end of each request, after generating - a response, before removing the request context. It is skipped - if :meth:`is_null_session` returns ``True``. - """ - raise NotImplementedError() - - -session_json_serializer = TaggedJSONSerializer() - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class SecureCookieSessionInterface(SessionInterface): - """The default session interface that stores sessions in signed cookies - through the :mod:`itsdangerous` module. - """ - - #: the salt that should be applied on top of the secret key for the - #: signing of cookie based sessions. - salt = "cookie-session" - #: the hash function to use for the signature. The default is sha1 - digest_method = staticmethod(_lazy_sha1) - #: the name of the itsdangerous supported key derivation. The default - #: is hmac. - key_derivation = "hmac" - #: A python serializer for the payload. The default is a compact - #: JSON derived serializer with support for some extra Python types - #: such as datetime objects or tuples. - serializer = session_json_serializer - session_class = SecureCookieSession - - def get_signing_serializer(self, app: Flask) -> URLSafeTimedSerializer | None: - if not app.secret_key: - return None - signer_kwargs = dict( - key_derivation=self.key_derivation, digest_method=self.digest_method - ) - return URLSafeTimedSerializer( - app.secret_key, - salt=self.salt, - serializer=self.serializer, - signer_kwargs=signer_kwargs, - ) - - def open_session(self, app: Flask, request: Request) -> SecureCookieSession | None: - s = self.get_signing_serializer(app) - if s is None: - return None - val = request.cookies.get(self.get_cookie_name(app)) - if not val: - return self.session_class() - max_age = int(app.permanent_session_lifetime.total_seconds()) - try: - data = s.loads(val, max_age=max_age) - return self.session_class(data) - except BadSignature: - return self.session_class() - - def save_session( - self, app: Flask, session: SessionMixin, response: Response - ) -> None: - name = self.get_cookie_name(app) - domain = self.get_cookie_domain(app) - path = self.get_cookie_path(app) - secure = self.get_cookie_secure(app) - samesite = self.get_cookie_samesite(app) - httponly = self.get_cookie_httponly(app) - - # Add a "Vary: Cookie" header if the session was accessed at all. - if session.accessed: - response.vary.add("Cookie") - - # If the session is modified to be empty, remove the cookie. - # If the session is empty, return without setting the cookie. 
- if not session: - if session.modified: - response.delete_cookie( - name, - domain=domain, - path=path, - secure=secure, - samesite=samesite, - httponly=httponly, - ) - response.vary.add("Cookie") - - return - - if not self.should_set_cookie(app, session): - return - - expires = self.get_expiration_time(app, session) - val = self.get_signing_serializer(app).dumps(dict(session)) # type: ignore - response.set_cookie( - name, - val, # type: ignore - expires=expires, - httponly=httponly, - domain=domain, - path=path, - secure=secure, - samesite=samesite, - ) - response.vary.add("Cookie") diff --git a/myenv/Lib/site-packages/flask/signals.py b/myenv/Lib/site-packages/flask/signals.py deleted file mode 100644 index 444fda9..0000000 --- a/myenv/Lib/site-packages/flask/signals.py +++ /dev/null @@ -1,17 +0,0 @@ -from __future__ import annotations - -from blinker import Namespace - -# This namespace is only for signals provided by Flask itself. -_signals = Namespace() - -template_rendered = _signals.signal("template-rendered") -before_render_template = _signals.signal("before-render-template") -request_started = _signals.signal("request-started") -request_finished = _signals.signal("request-finished") -request_tearing_down = _signals.signal("request-tearing-down") -got_request_exception = _signals.signal("got-request-exception") -appcontext_tearing_down = _signals.signal("appcontext-tearing-down") -appcontext_pushed = _signals.signal("appcontext-pushed") -appcontext_popped = _signals.signal("appcontext-popped") -message_flashed = _signals.signal("message-flashed") diff --git a/myenv/Lib/site-packages/flask/templating.py b/myenv/Lib/site-packages/flask/templating.py deleted file mode 100644 index 618a3b3..0000000 --- a/myenv/Lib/site-packages/flask/templating.py +++ /dev/null @@ -1,219 +0,0 @@ -from __future__ import annotations - -import typing as t - -from jinja2 import BaseLoader -from jinja2 import Environment as BaseEnvironment -from jinja2 import Template -from jinja2 import TemplateNotFound - -from .globals import _cv_app -from .globals import _cv_request -from .globals import current_app -from .globals import request -from .helpers import stream_with_context -from .signals import before_render_template -from .signals import template_rendered - -if t.TYPE_CHECKING: # pragma: no cover - from .app import Flask - from .sansio.app import App - from .sansio.scaffold import Scaffold - - -def _default_template_ctx_processor() -> dict[str, t.Any]: - """Default template context processor. Injects `request`, - `session` and `g`. - """ - appctx = _cv_app.get(None) - reqctx = _cv_request.get(None) - rv: dict[str, t.Any] = {} - if appctx is not None: - rv["g"] = appctx.g - if reqctx is not None: - rv["request"] = reqctx.request - rv["session"] = reqctx.session - return rv - - -class Environment(BaseEnvironment): - """Works like a regular Jinja2 environment but has some additional - knowledge of how Flask's blueprint works so that it can prepend the - name of the blueprint to referenced templates if necessary. - """ - - def __init__(self, app: App, **options: t.Any) -> None: - if "loader" not in options: - options["loader"] = app.create_global_jinja_loader() - BaseEnvironment.__init__(self, **options) - self.app = app - - -class DispatchingJinjaLoader(BaseLoader): - """A loader that looks for templates in the application and all - the blueprint folders. 
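The application's own loader is consulted first, then each registered blueprint's, so an app-level template can shadow a blueprint template of the same name. The config flag checked in ``get_source`` below can be enabled to trace the lookup:

.. code-block:: python

    app.config["EXPLAIN_TEMPLATE_LOADING"] = True  # log each loader attempt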
- """ - - def __init__(self, app: App) -> None: - self.app = app - - def get_source( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - if self.app.config["EXPLAIN_TEMPLATE_LOADING"]: - return self._get_source_explained(environment, template) - return self._get_source_fast(environment, template) - - def _get_source_explained( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - attempts = [] - rv: tuple[str, str | None, t.Callable[[], bool] | None] | None - trv: None | (tuple[str, str | None, t.Callable[[], bool] | None]) = None - - for srcobj, loader in self._iter_loaders(template): - try: - rv = loader.get_source(environment, template) - if trv is None: - trv = rv - except TemplateNotFound: - rv = None - attempts.append((loader, srcobj, rv)) - - from .debughelpers import explain_template_loading_attempts - - explain_template_loading_attempts(self.app, template, attempts) - - if trv is not None: - return trv - raise TemplateNotFound(template) - - def _get_source_fast( - self, environment: BaseEnvironment, template: str - ) -> tuple[str, str | None, t.Callable[[], bool] | None]: - for _srcobj, loader in self._iter_loaders(template): - try: - return loader.get_source(environment, template) - except TemplateNotFound: - continue - raise TemplateNotFound(template) - - def _iter_loaders(self, template: str) -> t.Iterator[tuple[Scaffold, BaseLoader]]: - loader = self.app.jinja_loader - if loader is not None: - yield self.app, loader - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - yield blueprint, loader - - def list_templates(self) -> list[str]: - result = set() - loader = self.app.jinja_loader - if loader is not None: - result.update(loader.list_templates()) - - for blueprint in self.app.iter_blueprints(): - loader = blueprint.jinja_loader - if loader is not None: - for template in loader.list_templates(): - result.add(template) - - return list(result) - - -def _render(app: Flask, template: Template, context: dict[str, t.Any]) -> str: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - rv = template.render(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - return rv - - -def render_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> str: - """Render a template by name with the given context. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _render(app, template, context) - - -def render_template_string(source: str, **context: t.Any) -> str: - """Render a template from the given source string with the given - context. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. 
- """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _render(app, template, context) - - -def _stream( - app: Flask, template: Template, context: dict[str, t.Any] -) -> t.Iterator[str]: - app.update_template_context(context) - before_render_template.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - def generate() -> t.Iterator[str]: - yield from template.generate(context) - template_rendered.send( - app, _async_wrapper=app.ensure_sync, template=template, context=context - ) - - rv = generate() - - # If a request context is active, keep it while generating. - if request: - rv = stream_with_context(rv) - - return rv - - -def stream_template( - template_name_or_list: str | Template | list[str | Template], - **context: t.Any, -) -> t.Iterator[str]: - """Render a template by name with the given context as a stream. - This returns an iterator of strings, which can be used as a - streaming response from a view. - - :param template_name_or_list: The name of the template to render. If - a list is given, the first name to exist will be rendered. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.get_or_select_template(template_name_or_list) - return _stream(app, template, context) - - -def stream_template_string(source: str, **context: t.Any) -> t.Iterator[str]: - """Render a template from the given source string with the given - context as a stream. This returns an iterator of strings, which can - be used as a streaming response from a view. - - :param source: The source code of the template to render. - :param context: The variables to make available in the template. - - .. versionadded:: 2.2 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - template = app.jinja_env.from_string(source) - return _stream(app, template, context) diff --git a/myenv/Lib/site-packages/flask/testing.py b/myenv/Lib/site-packages/flask/testing.py deleted file mode 100644 index a27b7c8..0000000 --- a/myenv/Lib/site-packages/flask/testing.py +++ /dev/null @@ -1,298 +0,0 @@ -from __future__ import annotations - -import importlib.metadata -import typing as t -from contextlib import contextmanager -from contextlib import ExitStack -from copy import copy -from types import TracebackType -from urllib.parse import urlsplit - -import werkzeug.test -from click.testing import CliRunner -from werkzeug.test import Client -from werkzeug.wrappers import Request as BaseRequest - -from .cli import ScriptInfo -from .sessions import SessionMixin - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIEnvironment - from werkzeug.test import TestResponse - - from .app import Flask - - -class EnvironBuilder(werkzeug.test.EnvironBuilder): - """An :class:`~werkzeug.test.EnvironBuilder`, that takes defaults from the - application. - - :param app: The Flask application to configure the environment from. - :param path: URL path being requested. - :param base_url: Base URL where the app is being served, which - ``path`` is relative to. If not given, built from - :data:`PREFERRED_URL_SCHEME`, ``subdomain``, - :data:`SERVER_NAME`, and :data:`APPLICATION_ROOT`. - :param subdomain: Subdomain name to append to :data:`SERVER_NAME`. - :param url_scheme: Scheme to use instead of - :data:`PREFERRED_URL_SCHEME`. 
- :param json: If given, this is serialized as JSON and passed as - ``data``. Also defaults ``content_type`` to - ``application/json``. - :param args: other positional arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - :param kwargs: other keyword arguments passed to - :class:`~werkzeug.test.EnvironBuilder`. - """ - - def __init__( - self, - app: Flask, - path: str = "/", - base_url: str | None = None, - subdomain: str | None = None, - url_scheme: str | None = None, - *args: t.Any, - **kwargs: t.Any, - ) -> None: - assert not (base_url or subdomain or url_scheme) or ( - base_url is not None - ) != bool( - subdomain or url_scheme - ), 'Cannot pass "subdomain" or "url_scheme" with "base_url".' - - if base_url is None: - http_host = app.config.get("SERVER_NAME") or "localhost" - app_root = app.config["APPLICATION_ROOT"] - - if subdomain: - http_host = f"{subdomain}.{http_host}" - - if url_scheme is None: - url_scheme = app.config["PREFERRED_URL_SCHEME"] - - url = urlsplit(path) - base_url = ( - f"{url.scheme or url_scheme}://{url.netloc or http_host}" - f"/{app_root.lstrip('/')}" - ) - path = url.path - - if url.query: - sep = b"?" if isinstance(url.query, bytes) else "?" - path += sep + url.query - - self.app = app - super().__init__(path, base_url, *args, **kwargs) - - def json_dumps(self, obj: t.Any, **kwargs: t.Any) -> str: # type: ignore - """Serialize ``obj`` to a JSON-formatted string. - - The serialization will be configured according to the config associated - with this EnvironBuilder's ``app``. - """ - return self.app.json.dumps(obj, **kwargs) - - -_werkzeug_version = "" - - -def _get_werkzeug_version() -> str: - global _werkzeug_version - - if not _werkzeug_version: - _werkzeug_version = importlib.metadata.version("werkzeug") - - return _werkzeug_version - - -class FlaskClient(Client): - """Works like a regular Werkzeug test client but has knowledge about - Flask's contexts to defer the cleanup of the request context until - the end of a ``with`` block. For general information about how to - use this class refer to :class:`werkzeug.test.Client`. - - .. versionchanged:: 0.12 - `app.test_client()` includes preset default environment, which can be - set after instantiation of the `app.test_client()` object in - `client.environ_base`. - - Basic usage is outlined in the :doc:`/testing` chapter. - """ - - application: Flask - - def __init__(self, *args: t.Any, **kwargs: t.Any) -> None: - super().__init__(*args, **kwargs) - self.preserve_context = False - self._new_contexts: list[t.ContextManager[t.Any]] = [] - self._context_stack = ExitStack() - self.environ_base = { - "REMOTE_ADDR": "127.0.0.1", - "HTTP_USER_AGENT": f"Werkzeug/{_get_werkzeug_version()}", - } - - @contextmanager - def session_transaction( - self, *args: t.Any, **kwargs: t.Any - ) -> t.Iterator[SessionMixin]: - """When used in combination with a ``with`` statement this opens a - session transaction. This can be used to modify the session that - the test client uses. Once the ``with`` block is left the session is - stored back. - - :: - - with client.session_transaction() as session: - session['value'] = 42 - - Internally this is implemented by going through a temporary test - request context and since session handling could depend on - request variables this function accepts the same arguments as - :meth:`~flask.Flask.test_request_context` which are directly - passed through. - """ - if self._cookies is None: - raise TypeError( - "Cookies are disabled. Create a client with 'use_cookies=True'." 
- ) - - app = self.application - ctx = app.test_request_context(*args, **kwargs) - self._add_cookies_to_wsgi(ctx.request.environ) - - with ctx: - sess = app.session_interface.open_session(app, ctx.request) - - if sess is None: - raise RuntimeError("Session backend did not open a session.") - - yield sess - resp = app.response_class() - - if app.session_interface.is_null_session(sess): - return - - with ctx: - app.session_interface.save_session(app, sess, resp) - - self._update_cookies_from_response( - ctx.request.host.partition(":")[0], - ctx.request.path, - resp.headers.getlist("Set-Cookie"), - ) - - def _copy_environ(self, other: WSGIEnvironment) -> WSGIEnvironment: - out = {**self.environ_base, **other} - - if self.preserve_context: - out["werkzeug.debug.preserve_context"] = self._new_contexts.append - - return out - - def _request_from_builder_args( - self, args: tuple[t.Any, ...], kwargs: dict[str, t.Any] - ) -> BaseRequest: - kwargs["environ_base"] = self._copy_environ(kwargs.get("environ_base", {})) - builder = EnvironBuilder(self.application, *args, **kwargs) - - try: - return builder.get_request() - finally: - builder.close() - - def open( - self, - *args: t.Any, - buffered: bool = False, - follow_redirects: bool = False, - **kwargs: t.Any, - ) -> TestResponse: - if args and isinstance( - args[0], (werkzeug.test.EnvironBuilder, dict, BaseRequest) - ): - if isinstance(args[0], werkzeug.test.EnvironBuilder): - builder = copy(args[0]) - builder.environ_base = self._copy_environ(builder.environ_base or {}) # type: ignore[arg-type] - request = builder.get_request() - elif isinstance(args[0], dict): - request = EnvironBuilder.from_environ( - args[0], app=self.application, environ_base=self._copy_environ({}) - ).get_request() - else: - # isinstance(args[0], BaseRequest) - request = copy(args[0]) - request.environ = self._copy_environ(request.environ) - else: - # request is None - request = self._request_from_builder_args(args, kwargs) - - # Pop any previously preserved contexts. This prevents contexts - # from being preserved across redirects or multiple requests - # within a single block. - self._context_stack.close() - - response = super().open( - request, - buffered=buffered, - follow_redirects=follow_redirects, - ) - response.json_module = self.application.json # type: ignore[assignment] - - # Re-push contexts that were preserved during the request. - while self._new_contexts: - cm = self._new_contexts.pop() - self._context_stack.enter_context(cm) - - return response - - def __enter__(self) -> FlaskClient: - if self.preserve_context: - raise RuntimeError("Cannot nest client invocations") - self.preserve_context = True - return self - - def __exit__( - self, - exc_type: type | None, - exc_value: BaseException | None, - tb: TracebackType | None, - ) -> None: - self.preserve_context = False - self._context_stack.close() - - -class FlaskCliRunner(CliRunner): - """A :class:`~click.testing.CliRunner` for testing a Flask app's - CLI commands. Typically created using - :meth:`~flask.Flask.test_cli_runner`. See :ref:`testing-cli`. - """ - - def __init__(self, app: Flask, **kwargs: t.Any) -> None: - self.app = app - super().__init__(**kwargs) - - def invoke( # type: ignore - self, cli: t.Any = None, args: t.Any = None, **kwargs: t.Any - ) -> t.Any: - """Invokes a CLI command in an isolated environment. See - :meth:`CliRunner.invoke ` for - full method documentation. See :ref:`testing-cli` for examples. 
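A small sketch, invoking Flask's built-in ``routes`` command through the runner:

.. code-block:: python

    runner = app.test_cli_runner()
    result = runner.invoke(args=["routes"])
    assert result.exit_code == 0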
- - If the ``obj`` argument is not given, passes an instance of - :class:`~flask.cli.ScriptInfo` that knows how to load the Flask - app being tested. - - :param cli: Command object to invoke. Default is the app's - :attr:`~flask.app.Flask.cli` group. - :param args: List of strings to invoke the command with. - - :return: a :class:`~click.testing.Result` object. - """ - if cli is None: - cli = self.app.cli - - if "obj" not in kwargs: - kwargs["obj"] = ScriptInfo(create_app=lambda: self.app) - - return super().invoke(cli, args, **kwargs) diff --git a/myenv/Lib/site-packages/flask/typing.py b/myenv/Lib/site-packages/flask/typing.py deleted file mode 100644 index cf6d4ae..0000000 --- a/myenv/Lib/site-packages/flask/typing.py +++ /dev/null @@ -1,90 +0,0 @@ -from __future__ import annotations - -import typing as t - -if t.TYPE_CHECKING: # pragma: no cover - from _typeshed.wsgi import WSGIApplication # noqa: F401 - from werkzeug.datastructures import Headers # noqa: F401 - from werkzeug.sansio.response import Response # noqa: F401 - -# The possible types that are directly convertible or are a Response object. -ResponseValue = t.Union[ - "Response", - str, - bytes, - t.List[t.Any], - # Only dict is actually accepted, but Mapping allows for TypedDict. - t.Mapping[str, t.Any], - t.Iterator[str], - t.Iterator[bytes], -] - -# the possible types for an individual HTTP header -# This should be a Union, but mypy doesn't pass unless it's a TypeVar. -HeaderValue = t.Union[str, t.List[str], t.Tuple[str, ...]] - -# the possible types for HTTP headers -HeadersValue = t.Union[ - "Headers", - t.Mapping[str, HeaderValue], - t.Sequence[t.Tuple[str, HeaderValue]], -] - -# The possible types returned by a route function. -ResponseReturnValue = t.Union[ - ResponseValue, - t.Tuple[ResponseValue, HeadersValue], - t.Tuple[ResponseValue, int], - t.Tuple[ResponseValue, int, HeadersValue], - "WSGIApplication", -] - -# Allow any subclass of werkzeug.Response, such as the one from Flask, -# as a callback argument. Using werkzeug.Response directly makes a -# callback annotated with flask.Response fail type checking. -ResponseClass = t.TypeVar("ResponseClass", bound="Response") - -AppOrBlueprintKey = t.Optional[str] # The App key is None, whereas blueprints are named -AfterRequestCallable = t.Union[ - t.Callable[[ResponseClass], ResponseClass], - t.Callable[[ResponseClass], t.Awaitable[ResponseClass]], -] -BeforeFirstRequestCallable = t.Union[ - t.Callable[[], None], t.Callable[[], t.Awaitable[None]] -] -BeforeRequestCallable = t.Union[ - t.Callable[[], t.Optional[ResponseReturnValue]], - t.Callable[[], t.Awaitable[t.Optional[ResponseReturnValue]]], -] -ShellContextProcessorCallable = t.Callable[[], t.Dict[str, t.Any]] -TeardownCallable = t.Union[ - t.Callable[[t.Optional[BaseException]], None], - t.Callable[[t.Optional[BaseException]], t.Awaitable[None]], -] -TemplateContextProcessorCallable = t.Union[ - t.Callable[[], t.Dict[str, t.Any]], - t.Callable[[], t.Awaitable[t.Dict[str, t.Any]]], -] -TemplateFilterCallable = t.Callable[..., t.Any] -TemplateGlobalCallable = t.Callable[..., t.Any] -TemplateTestCallable = t.Callable[..., bool] -URLDefaultCallable = t.Callable[[str, t.Dict[str, t.Any]], None] -URLValuePreprocessorCallable = t.Callable[ - [t.Optional[str], t.Optional[t.Dict[str, t.Any]]], None -] - -# This should take Exception, but that either breaks typing the argument -# with a specific exception, or decorating multiple times with different -# exceptions (and using a union type on the argument). 
-# https://github.com/pallets/flask/issues/4095 -# https://github.com/pallets/flask/issues/4295 -# https://github.com/pallets/flask/issues/4297 -ErrorHandlerCallable = t.Union[ - t.Callable[[t.Any], ResponseReturnValue], - t.Callable[[t.Any], t.Awaitable[ResponseReturnValue]], -] - -RouteCallable = t.Union[ - t.Callable[..., ResponseReturnValue], - t.Callable[..., t.Awaitable[ResponseReturnValue]], -] diff --git a/myenv/Lib/site-packages/flask/views.py b/myenv/Lib/site-packages/flask/views.py deleted file mode 100644 index 794fdc0..0000000 --- a/myenv/Lib/site-packages/flask/views.py +++ /dev/null @@ -1,191 +0,0 @@ -from __future__ import annotations - -import typing as t - -from . import typing as ft -from .globals import current_app -from .globals import request - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -http_method_funcs = frozenset( - ["get", "post", "head", "options", "delete", "put", "trace", "patch"] -) - - -class View: - """Subclass this class and override :meth:`dispatch_request` to - create a generic class-based view. Call :meth:`as_view` to create a - view function that creates an instance of the class with the given - arguments and calls its ``dispatch_request`` method with any URL - variables. - - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class Hello(View): - init_every_request = False - - def dispatch_request(self, name): - return f"Hello, {name}!" - - app.add_url_rule( - "/hello/", view_func=Hello.as_view("hello") - ) - - Set :attr:`methods` on the class to change what methods the view - accepts. - - Set :attr:`decorators` on the class to apply a list of decorators to - the generated view function. Decorators applied to the class itself - will not be applied to the generated view function! - - Set :attr:`init_every_request` to ``False`` for efficiency, unless - you need to store request-global data on ``self``. - """ - - #: The methods this view is registered for. Uses the same default - #: (``["GET", "HEAD", "OPTIONS"]``) as ``route`` and - #: ``add_url_rule`` by default. - methods: t.ClassVar[t.Collection[str] | None] = None - - #: Control whether the ``OPTIONS`` method is handled automatically. - #: Uses the same default (``True``) as ``route`` and - #: ``add_url_rule`` by default. - provide_automatic_options: t.ClassVar[bool | None] = None - - #: A list of decorators to apply, in order, to the generated view - #: function. Remember that ``@decorator`` syntax is applied bottom - #: to top, so the first decorator in the list would be the bottom - #: decorator. - #: - #: .. versionadded:: 0.8 - decorators: t.ClassVar[list[t.Callable[[F], F]]] = [] - - #: Create a new instance of this view class for every request by - #: default. If a view subclass sets this to ``False``, the same - #: instance is used for every request. - #: - #: A single instance is more efficient, especially if complex setup - #: is done during init. However, storing data on ``self`` is no - #: longer safe across requests, and :data:`~flask.g` should be used - #: instead. - #: - #: .. versionadded:: 2.2 - init_every_request: t.ClassVar[bool] = True - - def dispatch_request(self) -> ft.ResponseReturnValue: - """The actual view function behavior. Subclasses must override - this and return a valid response. Any variables from the URL - rule are passed as keyword arguments. 
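A compact sketch of the class attributes described above; ``login_required`` and ``load_users`` are assumed helpers, not part of Flask:

.. code-block:: python

    class UserList(View):
        methods = ["GET"]
        decorators = [login_required]  # applied to the generated view function
        init_every_request = False     # one shared instance; keep state off self

        def dispatch_request(self):
            return render_template("users.html", users=load_users())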
- """ - raise NotImplementedError() - - @classmethod - def as_view( - cls, name: str, *class_args: t.Any, **class_kwargs: t.Any - ) -> ft.RouteCallable: - """Convert the class into a view function that can be registered - for a route. - - By default, the generated view will create a new instance of the - view class for every request and call its - :meth:`dispatch_request` method. If the view class sets - :attr:`init_every_request` to ``False``, the same instance will - be used for every request. - - Except for ``name``, all other arguments passed to this method - are forwarded to the view class ``__init__`` method. - - .. versionchanged:: 2.2 - Added the ``init_every_request`` class attribute. - """ - if cls.init_every_request: - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - self = view.view_class( # type: ignore[attr-defined] - *class_args, **class_kwargs - ) - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - else: - self = cls(*class_args, **class_kwargs) - - def view(**kwargs: t.Any) -> ft.ResponseReturnValue: - return current_app.ensure_sync(self.dispatch_request)(**kwargs) # type: ignore[no-any-return] - - if cls.decorators: - view.__name__ = name - view.__module__ = cls.__module__ - for decorator in cls.decorators: - view = decorator(view) - - # We attach the view class to the view function for two reasons: - # first of all it allows us to easily figure out what class-based - # view this thing came from, secondly it's also used for instantiating - # the view class so you can actually replace it with something else - # for testing purposes and debugging. - view.view_class = cls # type: ignore - view.__name__ = name - view.__doc__ = cls.__doc__ - view.__module__ = cls.__module__ - view.methods = cls.methods # type: ignore - view.provide_automatic_options = cls.provide_automatic_options # type: ignore - return view - - -class MethodView(View): - """Dispatches request methods to the corresponding instance methods. - For example, if you implement a ``get`` method, it will be used to - handle ``GET`` requests. - - This can be useful for defining a REST API. - - :attr:`methods` is automatically set based on the methods defined on - the class. - - See :doc:`views` for a detailed guide. - - .. code-block:: python - - class CounterAPI(MethodView): - def get(self): - return str(session.get("counter", 0)) - - def post(self): - session["counter"] = session.get("counter", 0) + 1 - return redirect(url_for("counter")) - - app.add_url_rule( - "/counter", view_func=CounterAPI.as_view("counter") - ) - """ - - def __init_subclass__(cls, **kwargs: t.Any) -> None: - super().__init_subclass__(**kwargs) - - if "methods" not in cls.__dict__: - methods = set() - - for base in cls.__bases__: - if getattr(base, "methods", None): - methods.update(base.methods) # type: ignore[attr-defined] - - for key in http_method_funcs: - if hasattr(cls, key): - methods.add(key.upper()) - - if methods: - cls.methods = methods - - def dispatch_request(self, **kwargs: t.Any) -> ft.ResponseReturnValue: - meth = getattr(self, request.method.lower(), None) - - # If the request method is HEAD and we don't have a handler for it - # retry with GET. 
- if meth is None and request.method == "HEAD": - meth = getattr(self, "get", None) - - assert meth is not None, f"Unimplemented method {request.method!r}" - return current_app.ensure_sync(meth)(**kwargs) # type: ignore[no-any-return] diff --git a/myenv/Lib/site-packages/flask/wrappers.py b/myenv/Lib/site-packages/flask/wrappers.py deleted file mode 100644 index c1eca80..0000000 --- a/myenv/Lib/site-packages/flask/wrappers.py +++ /dev/null @@ -1,174 +0,0 @@ -from __future__ import annotations - -import typing as t - -from werkzeug.exceptions import BadRequest -from werkzeug.exceptions import HTTPException -from werkzeug.wrappers import Request as RequestBase -from werkzeug.wrappers import Response as ResponseBase - -from . import json -from .globals import current_app -from .helpers import _split_blueprint_path - -if t.TYPE_CHECKING: # pragma: no cover - from werkzeug.routing import Rule - - -class Request(RequestBase): - """The request object used by default in Flask. Remembers the - matched endpoint and view arguments. - - It is what ends up as :class:`~flask.request`. If you want to replace - the request object used you can subclass this and set - :attr:`~flask.Flask.request_class` to your subclass. - - The request object is a :class:`~werkzeug.wrappers.Request` subclass and - provides all of the attributes Werkzeug defines plus a few Flask - specific ones. - """ - - json_module: t.Any = json - - #: The internal URL rule that matched the request. This can be - #: useful to inspect which methods are allowed for the URL from - #: a before/after handler (``request.url_rule.methods``) etc. - #: Though if the request's method was invalid for the URL rule, - #: the valid list is available in ``routing_exception.valid_methods`` - #: instead (an attribute of the Werkzeug exception - #: :exc:`~werkzeug.exceptions.MethodNotAllowed`) - #: because the request was never internally bound. - #: - #: .. versionadded:: 0.6 - url_rule: Rule | None = None - - #: A dict of view arguments that matched the request. If an exception - #: happened when matching, this will be ``None``. - view_args: dict[str, t.Any] | None = None - - #: If matching the URL failed, this is the exception that will be - #: raised / was raised as part of the request handling. This is - #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or - #: something similar. - routing_exception: HTTPException | None = None - - @property - def max_content_length(self) -> int | None: # type: ignore[override] - """Read-only view of the ``MAX_CONTENT_LENGTH`` config key.""" - if current_app: - return current_app.config["MAX_CONTENT_LENGTH"] # type: ignore[no-any-return] - else: - return None - - @property - def endpoint(self) -> str | None: - """The endpoint that matched the request URL. - - This will be ``None`` if matching failed or has not been - performed yet. - - This in combination with :attr:`view_args` can be used to - reconstruct the same URL or a modified URL. - """ - if self.url_rule is not None: - return self.url_rule.endpoint - - return None - - @property - def blueprint(self) -> str | None: - """The registered name of the current blueprint. - - This will be ``None`` if the endpoint is not part of a - blueprint, or if URL matching failed or has not been performed - yet. - - This does not necessarily match the name the blueprint was - created with. It may have been nested, or registered with a - different name. - """ - endpoint = self.endpoint - - if endpoint is not None and "." 
in endpoint: - return endpoint.rpartition(".")[0] - - return None - - @property - def blueprints(self) -> list[str]: - """The registered names of the current blueprint upwards through - parent blueprints. - - This will be an empty list if there is no current blueprint, or - if URL matching failed. - - .. versionadded:: 2.0.1 - """ - name = self.blueprint - - if name is None: - return [] - - return _split_blueprint_path(name) - - def _load_form_data(self) -> None: - super()._load_form_data() - - # In debug mode we're replacing the files multidict with an ad-hoc - # subclass that raises a different error for key errors. - if ( - current_app - and current_app.debug - and self.mimetype != "multipart/form-data" - and not self.files - ): - from .debughelpers import attach_enctype_error_multidict - - attach_enctype_error_multidict(self) - - def on_json_loading_failed(self, e: ValueError | None) -> t.Any: - try: - return super().on_json_loading_failed(e) - except BadRequest as e: - if current_app and current_app.debug: - raise - - raise BadRequest() from e - - -class Response(ResponseBase): - """The response object that is used by default in Flask. Works like the - response object from Werkzeug but is set to have an HTML mimetype by - default. Quite often you don't have to create this object yourself because - :meth:`~flask.Flask.make_response` will take care of that for you. - - If you want to replace the response object used you can subclass this and - set :attr:`~flask.Flask.response_class` to your subclass. - - .. versionchanged:: 1.0 - JSON support is added to the response, like the request. This is useful - when testing to get the test client response data as JSON. - - .. versionchanged:: 1.0 - - Added :attr:`max_cookie_size`. - """ - - default_mimetype: str | None = "text/html" - - json_module = json - - autocorrect_location_header = False - - @property - def max_cookie_size(self) -> int: # type: ignore - """Read-only view of the :data:`MAX_COOKIE_SIZE` config key. - - See :attr:`~werkzeug.wrappers.Response.max_cookie_size` in - Werkzeug's docs. - """ - if current_app: - return current_app.config["MAX_COOKIE_SIZE"] # type: ignore[no-any-return] - - # return Werkzeug's default when not in an app context - return super().max_cookie_size diff --git a/myenv/Lib/site-packages/flask_bcrypt.py b/myenv/Lib/site-packages/flask_bcrypt.py deleted file mode 100644 index 994985a..0000000 --- a/myenv/Lib/site-packages/flask_bcrypt.py +++ /dev/null @@ -1,225 +0,0 @@ -''' - flaskext.bcrypt - --------------- - - A Flask extension providing bcrypt hashing and comparison facilities. - - :copyright: (c) 2011 by Max Countryman. - :license: BSD, see LICENSE for more details. -''' - -from __future__ import absolute_import -from __future__ import print_function - -__version_info__ = ('1', '0', '1') -__version__ = '.'.join(__version_info__) -__author__ = 'Max Countryman' -__license__ = 'BSD' -__copyright__ = '(c) 2011 by Max Countryman' -__all__ = ['Bcrypt', 'check_password_hash', 'generate_password_hash'] - -import hmac - -try: - import bcrypt -except ImportError as e: - print('bcrypt is required to use Flask-Bcrypt') - raise e - -import hashlib - - -def generate_password_hash(password, rounds=None): - '''This helper function wraps the eponymous method of :class:`Bcrypt`. It - is intended to be used as a helper function at the expense of the - configuration variable provided when passing back the app object. In other - words this shortcut does not make use of the app object at all. 
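A round-trip sketch of the two module-level helpers this file exports (`generate_password_hash` here and `check_password_hash` just below); the password literal is illustrative. Each helper builds a throwaway Bcrypt() with no app, so app-level configuration is ignored:

    from flask_bcrypt import generate_password_hash, check_password_hash

    # Hash with an explicit work factor of 10 log rounds.
    pw_hash = generate_password_hash('hunter2', 10)

    # Hash the candidate and compare in constant time against the stored hash.
    assert check_password_hash(pw_hash, 'hunter2')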
-
-    To use this function, simply import it from the module and use it in a
-    similar fashion as the original method would be used. Here is a quick
-    example::
-
-        from flask_bcrypt import generate_password_hash
-        pw_hash = generate_password_hash('hunter2', 10)
-
-    :param password: The password to be hashed.
-    :param rounds: The optional number of rounds.
-    '''
-    return Bcrypt().generate_password_hash(password, rounds)
-
-
-def check_password_hash(pw_hash, password):
-    '''This helper function wraps the eponymous method of :class:`Bcrypt`. It
-    is intended to be used as a helper function at the expense of the
-    configuration variable provided when passing back the app object. In other
-    words this shortcut does not make use of the app object at all.
-
-    To use this function, simply import it from the module and use it in a
-    similar fashion as the original method would be used. Here is a quick
-    example::
-
-        from flask_bcrypt import check_password_hash
-        check_password_hash(pw_hash, 'hunter2') # returns True
-
-    :param pw_hash: The hash to be compared against.
-    :param password: The password to compare.
-    '''
-    return Bcrypt().check_password_hash(pw_hash, password)
-
-
-class Bcrypt(object):
-    '''Bcrypt class container for password hashing and checking logic using
-    bcrypt, of course. This class may be used to initialize your Flask app
-    object. The purpose is to provide a simple interface for overriding
-    Werkzeug's built-in password hashing utilities.
-
-    Although such methods are not actually overridden, the API is intentionally
-    made similar so that existing applications which make use of the previous
-    hashing functions might be easily adapted to the stronger facility of
-    bcrypt.
-
-    To get started you will wrap your application's app object something like
-    this::
-
-        app = Flask(__name__)
-        bcrypt = Bcrypt(app)
-
-    Now the two primary utility methods are exposed via this object, `bcrypt`.
-    So in the context of the application, important data, such as passwords,
-    could be hashed using this syntax::
-
-        password = 'hunter2'
-        pw_hash = bcrypt.generate_password_hash(password)
-
-    Once hashed, the value is irreversible. However, in the case of validating
-    a login, the candidate password is simply hashed and then compared to the
-    existing hash. Importantly, the comparison should be done in constant time
-    to help prevent timing attacks. A simple utility method is provided for
-    this::
-
-        candidate = 'secret'
-        bcrypt.check_password_hash(pw_hash, candidate)
-
-    If the candidate matches the existing password hash,
-    `check_password_hash` returns True. Otherwise, it returns False.
-
-    .. admonition:: Namespacing Issues
-
-        It's worth noting that if you use the format `bcrypt = Bcrypt(app)`,
-        you are effectively overriding the bcrypt module. Though it's unlikely
-        you would need to access the module outside of the scope of the
-        extension, be aware that it's overridden.
-
-        Alternatively, consider using a different name, such as `flask_bcrypt
-        = Bcrypt(app)`, to prevent naming collisions.
-
-    Additionally, a configuration value for `BCRYPT_LOG_ROUNDS` may be set in
-    the configuration of the Flask app. If none is provided, this will
-    internally be assigned to 12. (This value is used in determining the
-    complexity of the encryption; see bcrypt for more details.)
-
-    You may also set the hash version using the `BCRYPT_HASH_PREFIX` field in
-    the configuration of the Flask app. If not set, this will default to `2b`.
-    (See bcrypt for more details)
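A sketch of the app-bound setup these config keys describe; the values shown are the documented defaults:

    from flask import Flask
    from flask_bcrypt import Bcrypt

    app = Flask(__name__)
    app.config['BCRYPT_LOG_ROUNDS'] = 12     # salt complexity, default 12
    app.config['BCRYPT_HASH_PREFIX'] = '2b'  # algorithm version, default '2b'
    bcrypt = Bcrypt(app)

    pw_hash = bcrypt.generate_password_hash('hunter2')
    bcrypt.check_password_hash(pw_hash, 'hunter2')  # True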
-
-    By default, the bcrypt algorithm has a maximum password length of 72 bytes
-    and ignores any bytes beyond that. A common workaround is to hash the
-    given password using a cryptographic hash (such as `sha256`), take its
-    hexdigest to prevent NULL byte problems, and hash the result with bcrypt.
-    If the `BCRYPT_HANDLE_LONG_PASSWORDS` configuration value is set to `True`,
-    the workaround described above will be enabled.
-    **Warning: do not enable this option on a project that is already using
-    Flask-Bcrypt, or you will break password checking.**
-    **Warning: if this option is enabled on an existing project, disabling it
-    will break password checking.**
-
-    :param app: The Flask application object. Defaults to None.
-    '''
-
-    _log_rounds = 12
-    _prefix = '2b'
-    _handle_long_passwords = False
-
-    def __init__(self, app=None):
-        if app is not None:
-            self.init_app(app)
-
-    def init_app(self, app):
-        '''Initializes the application with the extension.
-
-        :param app: The Flask application object.
-        '''
-        self._log_rounds = app.config.get('BCRYPT_LOG_ROUNDS', 12)
-        self._prefix = app.config.get('BCRYPT_HASH_PREFIX', '2b')
-        self._handle_long_passwords = app.config.get(
-            'BCRYPT_HANDLE_LONG_PASSWORDS', False)
-
-    def _unicode_to_bytes(self, unicode_string):
-        '''Converts a unicode string to a bytes object.
-
-        :param unicode_string: The unicode string to convert.'''
-        if isinstance(unicode_string, str):
-            bytes_object = bytes(unicode_string, 'utf-8')
-        else:
-            bytes_object = unicode_string
-        return bytes_object
-
-    def generate_password_hash(self, password, rounds=None, prefix=None):
-        '''Generates a password hash using bcrypt. Specifying `rounds`
-        sets the log_rounds parameter of `bcrypt.gensalt()` which determines
-        the complexity of the salt. 12 is the default value. Specifying `prefix`
-        sets the `prefix` parameter of `bcrypt.gensalt()` which determines the
-        version of the algorithm used to create the hash.
-
-        Example usage of :meth:`generate_password_hash` might look something
-        like this::
-
-            pw_hash = bcrypt.generate_password_hash('secret', 10)
-
-        :param password: The password to be hashed.
-        :param rounds: The optional number of rounds.
-        :param prefix: The algorithm version to use.
-        '''
-
-        if not password:
-            raise ValueError('Password must be non-empty.')
-
-        if rounds is None:
-            rounds = self._log_rounds
-        if prefix is None:
-            prefix = self._prefix
-
-        # Python 3 unicode strings must be encoded as bytes before hashing.
-        password = self._unicode_to_bytes(password)
-        prefix = self._unicode_to_bytes(prefix)
-
-        if self._handle_long_passwords:
-            password = hashlib.sha256(password).hexdigest()
-            password = self._unicode_to_bytes(password)
-
-        salt = bcrypt.gensalt(rounds=rounds, prefix=prefix)
-        return bcrypt.hashpw(password, salt)
-
-    def check_password_hash(self, pw_hash, password):
-        '''Tests a password hash against a candidate password. The candidate
-        password is first hashed and then subsequently compared in constant
-        time to the existing hash. This will either return `True` or `False`.
-
-        Example usage of :meth:`check_password_hash` would look something
-        like this::
-
-            pw_hash = bcrypt.generate_password_hash('secret', 10)
-            bcrypt.check_password_hash(pw_hash, 'secret') # returns True
-
-        :param pw_hash: The hash to be compared against.
-        :param password: The password to compare.
-        '''
-
-        # Python 3 unicode strings must be encoded as bytes before hashing.
- pw_hash = self._unicode_to_bytes(pw_hash) - password = self._unicode_to_bytes(password) - - if self._handle_long_passwords: - password = hashlib.sha256(password).hexdigest() - password = self._unicode_to_bytes(password) - - return hmac.compare_digest(bcrypt.hashpw(password, pw_hash), pw_hash) diff --git a/myenv/Lib/site-packages/flask_login/__about__.py b/myenv/Lib/site-packages/flask_login/__about__.py deleted file mode 100644 index 1918d54..0000000 --- a/myenv/Lib/site-packages/flask_login/__about__.py +++ /dev/null @@ -1,10 +0,0 @@ -__title__ = "Flask-Login" -__description__ = "User session management for Flask" -__url__ = "https://github.com/maxcountryman/flask-login" -__version_info__ = ("0", "6", "3") -__version__ = ".".join(__version_info__) -__author__ = "Matthew Frazier" -__author_email__ = "leafstormrush@gmail.com" -__maintainer__ = "Max Countryman" -__license__ = "MIT" -__copyright__ = "(c) 2011 by Matthew Frazier" diff --git a/myenv/Lib/site-packages/flask_login/__init__.py b/myenv/Lib/site-packages/flask_login/__init__.py deleted file mode 100644 index fbe9c3e..0000000 --- a/myenv/Lib/site-packages/flask_login/__init__.py +++ /dev/null @@ -1,94 +0,0 @@ -from .__about__ import __version__ -from .config import AUTH_HEADER_NAME -from .config import COOKIE_DURATION -from .config import COOKIE_HTTPONLY -from .config import COOKIE_NAME -from .config import COOKIE_SECURE -from .config import ID_ATTRIBUTE -from .config import LOGIN_MESSAGE -from .config import LOGIN_MESSAGE_CATEGORY -from .config import REFRESH_MESSAGE -from .config import REFRESH_MESSAGE_CATEGORY -from .login_manager import LoginManager -from .mixins import AnonymousUserMixin -from .mixins import UserMixin -from .signals import session_protected -from .signals import user_accessed -from .signals import user_loaded_from_cookie -from .signals import user_loaded_from_request -from .signals import user_logged_in -from .signals import user_logged_out -from .signals import user_login_confirmed -from .signals import user_needs_refresh -from .signals import user_unauthorized -from .test_client import FlaskLoginClient -from .utils import confirm_login -from .utils import current_user -from .utils import decode_cookie -from .utils import encode_cookie -from .utils import fresh_login_required -from .utils import login_fresh -from .utils import login_remembered -from .utils import login_required -from .utils import login_url -from .utils import login_user -from .utils import logout_user -from .utils import make_next_param -from .utils import set_login_view - -__all__ = [ - "__version__", - "AUTH_HEADER_NAME", - "COOKIE_DURATION", - "COOKIE_HTTPONLY", - "COOKIE_NAME", - "COOKIE_SECURE", - "ID_ATTRIBUTE", - "LOGIN_MESSAGE", - "LOGIN_MESSAGE_CATEGORY", - "REFRESH_MESSAGE", - "REFRESH_MESSAGE_CATEGORY", - "LoginManager", - "AnonymousUserMixin", - "UserMixin", - "session_protected", - "user_accessed", - "user_loaded_from_cookie", - "user_loaded_from_request", - "user_logged_in", - "user_logged_out", - "user_login_confirmed", - "user_needs_refresh", - "user_unauthorized", - "FlaskLoginClient", - "confirm_login", - "current_user", - "decode_cookie", - "encode_cookie", - "fresh_login_required", - "login_fresh", - "login_remembered", - "login_required", - "login_url", - "login_user", - "logout_user", - "make_next_param", - "set_login_view", -] - - -def __getattr__(name): - if name == "user_loaded_from_header": - import warnings - from .signals import _user_loaded_from_header - - warnings.warn( - "'user_loaded_from_header' is deprecated 
and will be"
-            " removed in Flask-Login 0.7. Use"
-            " 'user_loaded_from_request' instead.",
-            DeprecationWarning,
-            stacklevel=2,
-        )
-        return _user_loaded_from_header
-
-    raise AttributeError(name)
diff --git a/myenv/Lib/site-packages/flask_login/config.py b/myenv/Lib/site-packages/flask_login/config.py
deleted file mode 100644
index fe2db2c..0000000
--- a/myenv/Lib/site-packages/flask_login/config.py
+++ /dev/null
@@ -1,55 +0,0 @@
-from datetime import timedelta
-
-#: The default name of the "remember me" cookie (``remember_token``)
-COOKIE_NAME = "remember_token"
-
-#: The default time before the "remember me" cookie expires (365 days).
-COOKIE_DURATION = timedelta(days=365)
-
-#: Whether the "remember me" cookie requires Secure; defaults to ``False``
-COOKIE_SECURE = False
-
-#: Whether the "remember me" cookie uses HttpOnly or not; defaults to ``True``
-COOKIE_HTTPONLY = True
-
-#: Whether the "remember me" cookie requires same origin; defaults to ``None``
-COOKIE_SAMESITE = None
-
-#: The default flash message to display when users need to log in.
-LOGIN_MESSAGE = "Please log in to access this page."
-
-#: The default flash message category to display when users need to log in.
-LOGIN_MESSAGE_CATEGORY = "message"
-
-#: The default flash message to display when users need to reauthenticate.
-REFRESH_MESSAGE = "Please reauthenticate to access this page."
-
-#: The default flash message category to display when users need to
-#: reauthenticate.
-REFRESH_MESSAGE_CATEGORY = "message"
-
-#: The default attribute to retrieve the str id of the user
-ID_ATTRIBUTE = "get_id"
-
-#: Default name of the auth header (``Authorization``)
-AUTH_HEADER_NAME = "Authorization"
-
-#: A set of session keys that are populated by Flask-Login. Use this set to
-#: purge keys safely and accurately.
-SESSION_KEYS = {
-    "_user_id",
-    "_remember",
-    "_remember_seconds",
-    "_id",
-    "_fresh",
-    "next",
-}
-
-#: A set of HTTP methods which are exempt from `login_required` and
-#: `fresh_login_required`. By default, this is just ``OPTIONS``.
-EXEMPT_METHODS = {"OPTIONS"}
-
-#: If true, the page the user is attempting to access is stored in the session
-#: rather than a url parameter when redirecting to the login view; defaults to
-#: ``False``.
-USE_SESSION_FOR_NEXT = False diff --git a/myenv/Lib/site-packages/flask_login/login_manager.py b/myenv/Lib/site-packages/flask_login/login_manager.py deleted file mode 100644 index 49d0844..0000000 --- a/myenv/Lib/site-packages/flask_login/login_manager.py +++ /dev/null @@ -1,543 +0,0 @@ -from datetime import datetime -from datetime import timedelta - -from flask import abort -from flask import current_app -from flask import flash -from flask import g -from flask import has_app_context -from flask import redirect -from flask import request -from flask import session - -from .config import AUTH_HEADER_NAME -from .config import COOKIE_DURATION -from .config import COOKIE_HTTPONLY -from .config import COOKIE_NAME -from .config import COOKIE_SAMESITE -from .config import COOKIE_SECURE -from .config import ID_ATTRIBUTE -from .config import LOGIN_MESSAGE -from .config import LOGIN_MESSAGE_CATEGORY -from .config import REFRESH_MESSAGE -from .config import REFRESH_MESSAGE_CATEGORY -from .config import SESSION_KEYS -from .config import USE_SESSION_FOR_NEXT -from .mixins import AnonymousUserMixin -from .signals import session_protected -from .signals import user_accessed -from .signals import user_loaded_from_cookie -from .signals import user_loaded_from_request -from .signals import user_needs_refresh -from .signals import user_unauthorized -from .utils import _create_identifier -from .utils import _user_context_processor -from .utils import decode_cookie -from .utils import encode_cookie -from .utils import expand_login_view -from .utils import login_url as make_login_url -from .utils import make_next_param - - -class LoginManager: - """This object is used to hold the settings used for logging in. Instances - of :class:`LoginManager` are *not* bound to specific apps, so you can - create one in the main body of your code and then bind it to your - app in a factory function. - """ - - def __init__(self, app=None, add_context_processor=True): - #: A class or factory function that produces an anonymous user, which - #: is used when no one is logged in. - self.anonymous_user = AnonymousUserMixin - - #: The name of the view to redirect to when the user needs to log in. - #: (This can be an absolute URL as well, if your authentication - #: machinery is external to your application.) - self.login_view = None - - #: Names of views to redirect to when the user needs to log in, - #: per blueprint. If the key value is set to None the value of - #: :attr:`login_view` will be used instead. - self.blueprint_login_views = {} - - #: The message to flash when a user is redirected to the login page. - self.login_message = LOGIN_MESSAGE - - #: The message category to flash when a user is redirected to the login - #: page. - self.login_message_category = LOGIN_MESSAGE_CATEGORY - - #: The name of the view to redirect to when the user needs to - #: reauthenticate. - self.refresh_view = None - - #: The message to flash when a user is redirected to the 'needs - #: refresh' page. - self.needs_refresh_message = REFRESH_MESSAGE - - #: The message category to flash when a user is redirected to the - #: 'needs refresh' page. - self.needs_refresh_message_category = REFRESH_MESSAGE_CATEGORY - - #: The mode to use session protection in. This can be either - #: ``'basic'`` (the default) or ``'strong'``, or ``None`` to disable - #: it. 
- self.session_protection = "basic" - - #: If present, used to translate flash messages ``self.login_message`` - #: and ``self.needs_refresh_message`` - self.localize_callback = None - - self.unauthorized_callback = None - - self.needs_refresh_callback = None - - self.id_attribute = ID_ATTRIBUTE - - self._user_callback = None - - self._header_callback = None - - self._request_callback = None - - self._session_identifier_generator = _create_identifier - - if app is not None: - self.init_app(app, add_context_processor) - - def setup_app(self, app, add_context_processor=True): # pragma: no cover - """ - This method has been deprecated. Please use - :meth:`LoginManager.init_app` instead. - """ - import warnings - - warnings.warn( - "'setup_app' is deprecated and will be removed in" - " Flask-Login 0.7. Use 'init_app' instead.", - DeprecationWarning, - stacklevel=2, - ) - self.init_app(app, add_context_processor) - - def init_app(self, app, add_context_processor=True): - """ - Configures an application. This registers an `after_request` call, and - attaches this `LoginManager` to it as `app.login_manager`. - - :param app: The :class:`flask.Flask` object to configure. - :type app: :class:`flask.Flask` - :param add_context_processor: Whether to add a context processor to - the app that adds a `current_user` variable to the template. - Defaults to ``True``. - :type add_context_processor: bool - """ - app.login_manager = self - app.after_request(self._update_remember_cookie) - - if add_context_processor: - app.context_processor(_user_context_processor) - - def unauthorized(self): - """ - This is called when the user is required to log in. If you register a - callback with :meth:`LoginManager.unauthorized_handler`, then it will - be called. Otherwise, it will take the following actions: - - - Flash :attr:`LoginManager.login_message` to the user. - - - If the app is using blueprints find the login view for - the current blueprint using `blueprint_login_views`. If the app - is not using blueprints or the login view for the current - blueprint is not specified use the value of `login_view`. - - - Redirect the user to the login view. (The page they were - attempting to access will be passed in the ``next`` query - string variable, so you can redirect there if present instead - of the homepage. Alternatively, it will be added to the session - as ``next`` if USE_SESSION_FOR_NEXT is set.) - - If :attr:`LoginManager.login_view` is not defined, then it will simply - raise a HTTP 401 (Unauthorized) error instead. - - This should be returned from a view or before/after_request function, - otherwise the redirect will have no effect. 
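A sketch of steering this flow with the hooks on this class; `app`, the `auth.login` endpoint, and the `users` lookup table are illustrative:

    from flask import redirect, request, url_for
    from flask_login import LoginManager

    login_manager = LoginManager(app)
    login_manager.login_view = 'auth.login'  # target of the default redirect

    @login_manager.user_loader
    def load_user(user_id):
        # Reload a user from the session's stored str id; None if unknown.
        return users.get(user_id)

    # Alternatively, replace the flash-and-redirect behavior entirely;
    # the callback's return value is used as the response.
    @login_manager.unauthorized_handler
    def handle_unauthorized():
        return redirect(url_for('auth.login', next=request.url))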
- """ - user_unauthorized.send(current_app._get_current_object()) - - if self.unauthorized_callback: - return self.unauthorized_callback() - - if request.blueprint in self.blueprint_login_views: - login_view = self.blueprint_login_views[request.blueprint] - else: - login_view = self.login_view - - if not login_view: - abort(401) - - if self.login_message: - if self.localize_callback is not None: - flash( - self.localize_callback(self.login_message), - category=self.login_message_category, - ) - else: - flash(self.login_message, category=self.login_message_category) - - config = current_app.config - if config.get("USE_SESSION_FOR_NEXT", USE_SESSION_FOR_NEXT): - login_url = expand_login_view(login_view) - session["_id"] = self._session_identifier_generator() - session["next"] = make_next_param(login_url, request.url) - redirect_url = make_login_url(login_view) - else: - redirect_url = make_login_url(login_view, next_url=request.url) - - return redirect(redirect_url) - - def user_loader(self, callback): - """ - This sets the callback for reloading a user from the session. The - function you set should take a user ID (a ``str``) and return a - user object, or ``None`` if the user does not exist. - - :param callback: The callback for retrieving a user object. - :type callback: callable - """ - self._user_callback = callback - return self.user_callback - - @property - def user_callback(self): - """Gets the user_loader callback set by user_loader decorator.""" - return self._user_callback - - def request_loader(self, callback): - """ - This sets the callback for loading a user from a Flask request. - The function you set should take Flask request object and - return a user object, or `None` if the user does not exist. - - :param callback: The callback for retrieving a user object. - :type callback: callable - """ - self._request_callback = callback - return self.request_callback - - @property - def request_callback(self): - """Gets the request_loader callback set by request_loader decorator.""" - return self._request_callback - - def unauthorized_handler(self, callback): - """ - This will set the callback for the `unauthorized` method, which among - other things is used by `login_required`. It takes no arguments, and - should return a response to be sent to the user instead of their - normal view. - - :param callback: The callback for unauthorized users. - :type callback: callable - """ - self.unauthorized_callback = callback - return callback - - def needs_refresh_handler(self, callback): - """ - This will set the callback for the `needs_refresh` method, which among - other things is used by `fresh_login_required`. It takes no arguments, - and should return a response to be sent to the user instead of their - normal view. - - :param callback: The callback for unauthorized users. - :type callback: callable - """ - self.needs_refresh_callback = callback - return callback - - def needs_refresh(self): - """ - This is called when the user is logged in, but they need to be - reauthenticated because their session is stale. If you register a - callback with `needs_refresh_handler`, then it will be called. - Otherwise, it will take the following actions: - - - Flash :attr:`LoginManager.needs_refresh_message` to the user. - - - Redirect the user to :attr:`LoginManager.refresh_view`. (The page - they were attempting to access will be passed in the ``next`` - query string variable, so you can redirect there if present - instead of the homepage.) 
- - If :attr:`LoginManager.refresh_view` is not defined, then it will - simply raise a HTTP 401 (Unauthorized) error instead. - - This should be returned from a view or before/after_request function, - otherwise the redirect will have no effect. - """ - user_needs_refresh.send(current_app._get_current_object()) - - if self.needs_refresh_callback: - return self.needs_refresh_callback() - - if not self.refresh_view: - abort(401) - - if self.needs_refresh_message: - if self.localize_callback is not None: - flash( - self.localize_callback(self.needs_refresh_message), - category=self.needs_refresh_message_category, - ) - else: - flash( - self.needs_refresh_message, - category=self.needs_refresh_message_category, - ) - - config = current_app.config - if config.get("USE_SESSION_FOR_NEXT", USE_SESSION_FOR_NEXT): - login_url = expand_login_view(self.refresh_view) - session["_id"] = self._session_identifier_generator() - session["next"] = make_next_param(login_url, request.url) - redirect_url = make_login_url(self.refresh_view) - else: - login_url = self.refresh_view - redirect_url = make_login_url(login_url, next_url=request.url) - - return redirect(redirect_url) - - def header_loader(self, callback): - """ - This function has been deprecated. Please use - :meth:`LoginManager.request_loader` instead. - - This sets the callback for loading a user from a header value. - The function you set should take an authentication token and - return a user object, or `None` if the user does not exist. - - :param callback: The callback for retrieving a user object. - :type callback: callable - """ - import warnings - - warnings.warn( - "'header_loader' is deprecated and will be removed in" - " Flask-Login 0.7. Use 'request_loader' instead.", - DeprecationWarning, - stacklevel=2, - ) - self._header_callback = callback - return callback - - def _update_request_context_with_user(self, user=None): - """Store the given user as ctx.user.""" - - if user is None: - user = self.anonymous_user() - - g._login_user = user - - def _load_user(self): - """Loads user from session or remember_me cookie as applicable""" - - if self._user_callback is None and self._request_callback is None: - raise Exception( - "Missing user_loader or request_loader. Refer to " - "http://flask-login.readthedocs.io/#how-it-works " - "for more info." 
- ) - - user_accessed.send(current_app._get_current_object()) - - # Check SESSION_PROTECTION - if self._session_protection_failed(): - return self._update_request_context_with_user() - - user = None - - # Load user from Flask Session - user_id = session.get("_user_id") - if user_id is not None and self._user_callback is not None: - user = self._user_callback(user_id) - - # Load user from Remember Me Cookie or Request Loader - if user is None: - config = current_app.config - cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME) - header_name = config.get("AUTH_HEADER_NAME", AUTH_HEADER_NAME) - has_cookie = ( - cookie_name in request.cookies and session.get("_remember") != "clear" - ) - if has_cookie: - cookie = request.cookies[cookie_name] - user = self._load_user_from_remember_cookie(cookie) - elif self._request_callback: - user = self._load_user_from_request(request) - elif header_name in request.headers: - header = request.headers[header_name] - user = self._load_user_from_header(header) - - return self._update_request_context_with_user(user) - - def _session_protection_failed(self): - sess = session._get_current_object() - ident = self._session_identifier_generator() - - app = current_app._get_current_object() - mode = app.config.get("SESSION_PROTECTION", self.session_protection) - - if not mode or mode not in ["basic", "strong"]: - return False - - # if the sess is empty, it's an anonymous user or just logged out - # so we can skip this - if sess and ident != sess.get("_id", None): - if mode == "basic" or sess.permanent: - if sess.get("_fresh") is not False: - sess["_fresh"] = False - session_protected.send(app) - return False - elif mode == "strong": - for k in SESSION_KEYS: - sess.pop(k, None) - - sess["_remember"] = "clear" - session_protected.send(app) - return True - - return False - - def _load_user_from_remember_cookie(self, cookie): - user_id = decode_cookie(cookie) - if user_id is not None: - session["_user_id"] = user_id - session["_fresh"] = False - user = None - if self._user_callback: - user = self._user_callback(user_id) - if user is not None: - app = current_app._get_current_object() - user_loaded_from_cookie.send(app, user=user) - return user - return None - - def _load_user_from_header(self, header): - if self._header_callback: - user = self._header_callback(header) - if user is not None: - app = current_app._get_current_object() - - from .signals import _user_loaded_from_header - - _user_loaded_from_header.send(app, user=user) - return user - return None - - def _load_user_from_request(self, request): - if self._request_callback: - user = self._request_callback(request) - if user is not None: - app = current_app._get_current_object() - user_loaded_from_request.send(app, user=user) - return user - return None - - def _update_remember_cookie(self, response): - # Don't modify the session unless there's something to do. 
- if "_remember" not in session and current_app.config.get( - "REMEMBER_COOKIE_REFRESH_EACH_REQUEST" - ): - session["_remember"] = "set" - - if "_remember" in session: - operation = session.pop("_remember", None) - - if operation == "set" and "_user_id" in session: - self._set_cookie(response) - elif operation == "clear": - self._clear_cookie(response) - - return response - - def _set_cookie(self, response): - # cookie settings - config = current_app.config - cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME) - domain = config.get("REMEMBER_COOKIE_DOMAIN") - path = config.get("REMEMBER_COOKIE_PATH", "/") - - secure = config.get("REMEMBER_COOKIE_SECURE", COOKIE_SECURE) - httponly = config.get("REMEMBER_COOKIE_HTTPONLY", COOKIE_HTTPONLY) - samesite = config.get("REMEMBER_COOKIE_SAMESITE", COOKIE_SAMESITE) - - if "_remember_seconds" in session: - duration = timedelta(seconds=session["_remember_seconds"]) - else: - duration = config.get("REMEMBER_COOKIE_DURATION", COOKIE_DURATION) - - # prepare data - data = encode_cookie(str(session["_user_id"])) - - if isinstance(duration, int): - duration = timedelta(seconds=duration) - - try: - expires = datetime.utcnow() + duration - except TypeError as e: - raise Exception( - "REMEMBER_COOKIE_DURATION must be a datetime.timedelta," - f" instead got: {duration}" - ) from e - - # actually set it - response.set_cookie( - cookie_name, - value=data, - expires=expires, - domain=domain, - path=path, - secure=secure, - httponly=httponly, - samesite=samesite, - ) - - def _clear_cookie(self, response): - config = current_app.config - cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME) - domain = config.get("REMEMBER_COOKIE_DOMAIN") - path = config.get("REMEMBER_COOKIE_PATH", "/") - response.delete_cookie(cookie_name, domain=domain, path=path) - - @property - def _login_disabled(self): - """Legacy property, use app.config['LOGIN_DISABLED'] instead.""" - import warnings - - warnings.warn( - "'_login_disabled' is deprecated and will be removed in" - " Flask-Login 0.7. Use 'LOGIN_DISABLED' in 'app.config'" - " instead.", - DeprecationWarning, - stacklevel=2, - ) - - if has_app_context(): - return current_app.config.get("LOGIN_DISABLED", False) - return False - - @_login_disabled.setter - def _login_disabled(self, newvalue): - """Legacy property setter, use app.config['LOGIN_DISABLED'] instead.""" - import warnings - - warnings.warn( - "'_login_disabled' is deprecated and will be removed in" - " Flask-Login 0.7. Use 'LOGIN_DISABLED' in 'app.config'" - " instead.", - DeprecationWarning, - stacklevel=2, - ) - current_app.config["LOGIN_DISABLED"] = newvalue diff --git a/myenv/Lib/site-packages/flask_login/mixins.py b/myenv/Lib/site-packages/flask_login/mixins.py deleted file mode 100644 index 0b3a71b..0000000 --- a/myenv/Lib/site-packages/flask_login/mixins.py +++ /dev/null @@ -1,65 +0,0 @@ -class UserMixin: - """ - This provides default implementations for the methods that Flask-Login - expects user objects to have. 
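A minimal sketch of the mixin in use; the default `get_id` defined below stringifies an `id` attribute, so a plain class only needs to provide one:

    from flask_login import UserMixin

    class User(UserMixin):
        def __init__(self, user_id):
            self.id = user_id  # consumed by the default UserMixin.get_id

    user = User(42)
    user.is_authenticated  # True (active users count as authenticated)
    user.get_id()          # '42'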
- """ - - # Python 3 implicitly set __hash__ to None if we override __eq__ - # We set it back to its default implementation - __hash__ = object.__hash__ - - @property - def is_active(self): - return True - - @property - def is_authenticated(self): - return self.is_active - - @property - def is_anonymous(self): - return False - - def get_id(self): - try: - return str(self.id) - except AttributeError: - raise NotImplementedError("No `id` attribute - override `get_id`") from None - - def __eq__(self, other): - """ - Checks the equality of two `UserMixin` objects using `get_id`. - """ - if isinstance(other, UserMixin): - return self.get_id() == other.get_id() - return NotImplemented - - def __ne__(self, other): - """ - Checks the inequality of two `UserMixin` objects using `get_id`. - """ - equal = self.__eq__(other) - if equal is NotImplemented: - return NotImplemented - return not equal - - -class AnonymousUserMixin: - """ - This is the default object for representing an anonymous user. - """ - - @property - def is_authenticated(self): - return False - - @property - def is_active(self): - return False - - @property - def is_anonymous(self): - return True - - def get_id(self): - return diff --git a/myenv/Lib/site-packages/flask_login/signals.py b/myenv/Lib/site-packages/flask_login/signals.py deleted file mode 100644 index cf9157f..0000000 --- a/myenv/Lib/site-packages/flask_login/signals.py +++ /dev/null @@ -1,61 +0,0 @@ -from flask.signals import Namespace - -_signals = Namespace() - -#: Sent when a user is logged in. In addition to the app (which is the -#: sender), it is passed `user`, which is the user being logged in. -user_logged_in = _signals.signal("logged-in") - -#: Sent when a user is logged out. In addition to the app (which is the -#: sender), it is passed `user`, which is the user being logged out. -user_logged_out = _signals.signal("logged-out") - -#: Sent when the user is loaded from the cookie. In addition to the app (which -#: is the sender), it is passed `user`, which is the user being reloaded. -user_loaded_from_cookie = _signals.signal("loaded-from-cookie") - -#: Sent when the user is loaded from the header. In addition to the app (which -#: is the #: sender), it is passed `user`, which is the user being reloaded. -_user_loaded_from_header = _signals.signal("loaded-from-header") - -#: Sent when the user is loaded from the request. In addition to the app (which -#: is the #: sender), it is passed `user`, which is the user being reloaded. -user_loaded_from_request = _signals.signal("loaded-from-request") - -#: Sent when a user's login is confirmed, marking it as fresh. (It is not -#: called for a normal login.) -#: It receives no additional arguments besides the app. -user_login_confirmed = _signals.signal("login-confirmed") - -#: Sent when the `unauthorized` method is called on a `LoginManager`. It -#: receives no additional arguments besides the app. -user_unauthorized = _signals.signal("unauthorized") - -#: Sent when the `needs_refresh` method is called on a `LoginManager`. It -#: receives no additional arguments besides the app. -user_needs_refresh = _signals.signal("needs-refresh") - -#: Sent whenever the user is accessed/loaded -#: receives no additional arguments besides the app. -user_accessed = _signals.signal("accessed") - -#: Sent whenever session protection takes effect, and a session is either -#: marked non-fresh or deleted. It receives no additional arguments besides -#: the app. 
-session_protected = _signals.signal("session-protected") - - -def __getattr__(name): - if name == "user_loaded_from_header": - import warnings - - warnings.warn( - "'user_loaded_from_header' is deprecated and will be" - " removed in Flask-Login 0.7. Use" - " 'user_loaded_from_request' instead.", - DeprecationWarning, - stacklevel=2, - ) - return _user_loaded_from_header - - raise AttributeError(name) diff --git a/myenv/Lib/site-packages/flask_login/test_client.py b/myenv/Lib/site-packages/flask_login/test_client.py deleted file mode 100644 index be2a8bf..0000000 --- a/myenv/Lib/site-packages/flask_login/test_client.py +++ /dev/null @@ -1,19 +0,0 @@ -from flask.testing import FlaskClient - - -class FlaskLoginClient(FlaskClient): - """ - A Flask test client that knows how to log in users - using the Flask-Login extension. - """ - - def __init__(self, *args, **kwargs): - user = kwargs.pop("user", None) - fresh = kwargs.pop("fresh_login", True) - - super().__init__(*args, **kwargs) - - if user: - with self.session_transaction() as sess: - sess["_user_id"] = user.get_id() - sess["_fresh"] = fresh diff --git a/myenv/Lib/site-packages/flask_login/utils.py b/myenv/Lib/site-packages/flask_login/utils.py deleted file mode 100644 index 37b2056..0000000 --- a/myenv/Lib/site-packages/flask_login/utils.py +++ /dev/null @@ -1,415 +0,0 @@ -import hmac -from functools import wraps -from hashlib import sha512 -from urllib.parse import parse_qs -from urllib.parse import urlencode -from urllib.parse import urlsplit -from urllib.parse import urlunsplit - -from flask import current_app -from flask import g -from flask import has_request_context -from flask import request -from flask import session -from flask import url_for -from werkzeug.local import LocalProxy - -from .config import COOKIE_NAME -from .config import EXEMPT_METHODS -from .signals import user_logged_in -from .signals import user_logged_out -from .signals import user_login_confirmed - -#: A proxy for the current user. If no user is logged in, this will be an -#: anonymous user -current_user = LocalProxy(lambda: _get_user()) - - -def encode_cookie(payload, key=None): - """ - This will encode a ``str`` value into a cookie, and sign that cookie - with the app's secret key. - - :param payload: The value to encode, as `str`. - :type payload: str - - :param key: The key to use when creating the cookie digest. If not - specified, the SECRET_KEY value from app config will be used. - :type key: str - """ - return f"{payload}|{_cookie_digest(payload, key=key)}" - - -def decode_cookie(cookie, key=None): - """ - This decodes a cookie given by `encode_cookie`. If verification of the - cookie fails, ``None`` will be implicitly returned. - - :param cookie: An encoded cookie. - :type cookie: str - - :param key: The key to use when creating the cookie digest. If not - specified, the SECRET_KEY value from app config will be used. - :type key: str - """ - try: - payload, digest = cookie.rsplit("|", 1) - if hasattr(digest, "decode"): - digest = digest.decode("ascii") # pragma: no cover - except ValueError: - return - - if hmac.compare_digest(_cookie_digest(payload, key=key), digest): - return payload - - -def make_next_param(login_url, current_url): - """ - Reduces the scheme and host from a given URL so it can be passed to - the given `login` URL more efficiently. - - :param login_url: The login URL being redirected to. - :type login_url: str - :param current_url: The URL to reduce. 
- :type current_url: str - """ - l_url = urlsplit(login_url) - c_url = urlsplit(current_url) - - if (not l_url.scheme or l_url.scheme == c_url.scheme) and ( - not l_url.netloc or l_url.netloc == c_url.netloc - ): - return urlunsplit(("", "", c_url.path, c_url.query, "")) - return current_url - - -def expand_login_view(login_view): - """ - Returns the url for the login view, expanding the view name to a url if - needed. - - :param login_view: The name of the login view or a URL for the login view. - :type login_view: str - """ - if login_view.startswith(("https://", "http://", "/")): - return login_view - - return url_for(login_view) - - -def login_url(login_view, next_url=None, next_field="next"): - """ - Creates a URL for redirecting to a login page. If only `login_view` is - provided, this will just return the URL for it. If `next_url` is provided, - however, this will append a ``next=URL`` parameter to the query string - so that the login view can redirect back to that URL. Flask-Login's default - unauthorized handler uses this function when redirecting to your login url. - To force the host name used, set `FORCE_HOST_FOR_REDIRECTS` to a host. This - prevents from redirecting to external sites if request headers Host or - X-Forwarded-For are present. - - :param login_view: The name of the login view. (Alternately, the actual - URL to the login view.) - :type login_view: str - :param next_url: The URL to give the login view for redirection. - :type next_url: str - :param next_field: What field to store the next URL in. (It defaults to - ``next``.) - :type next_field: str - """ - base = expand_login_view(login_view) - - if next_url is None: - return base - - parsed_result = urlsplit(base) - md = parse_qs(parsed_result.query, keep_blank_values=True) - md[next_field] = make_next_param(base, next_url) - netloc = current_app.config.get("FORCE_HOST_FOR_REDIRECTS") or parsed_result.netloc - parsed_result = parsed_result._replace( - netloc=netloc, query=urlencode(md, doseq=True) - ) - return urlunsplit(parsed_result) - - -def login_fresh(): - """ - This returns ``True`` if the current login is fresh. - """ - return session.get("_fresh", False) - - -def login_remembered(): - """ - This returns ``True`` if the current login is remembered across sessions. - """ - config = current_app.config - cookie_name = config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME) - has_cookie = cookie_name in request.cookies and session.get("_remember") != "clear" - if has_cookie: - cookie = request.cookies[cookie_name] - user_id = decode_cookie(cookie) - return user_id is not None - return False - - -def login_user(user, remember=False, duration=None, force=False, fresh=True): - """ - Logs a user in. You should pass the actual user object to this. If the - user's `is_active` property is ``False``, they will not be logged in - unless `force` is ``True``. - - This will return ``True`` if the log in attempt succeeds, and ``False`` if - it fails (i.e. because the user is inactive). - - :param user: The user object to log in. - :type user: object - :param remember: Whether to remember the user after their session expires. - Defaults to ``False``. - :type remember: bool - :param duration: The amount of time before the remember cookie expires. If - ``None`` the value set in the settings is used. Defaults to ``None``. - :type duration: :class:`datetime.timedelta` - :param force: If the user is inactive, setting this to ``True`` will log - them in regardless. Defaults to ``False``. 
- :type force: bool - :param fresh: setting this to ``False`` will log in the user with a session - marked as not "fresh". Defaults to ``True``. - :type fresh: bool - """ - if not force and not user.is_active: - return False - - user_id = getattr(user, current_app.login_manager.id_attribute)() - session["_user_id"] = user_id - session["_fresh"] = fresh - session["_id"] = current_app.login_manager._session_identifier_generator() - - if remember: - session["_remember"] = "set" - if duration is not None: - try: - # equal to timedelta.total_seconds() but works with Python 2.6 - session["_remember_seconds"] = ( - duration.microseconds - + (duration.seconds + duration.days * 24 * 3600) * 10**6 - ) / 10.0**6 - except AttributeError as e: - raise Exception( - f"duration must be a datetime.timedelta, instead got: {duration}" - ) from e - - current_app.login_manager._update_request_context_with_user(user) - user_logged_in.send(current_app._get_current_object(), user=_get_user()) - return True - - -def logout_user(): - """ - Logs a user out. (You do not need to pass the actual user.) This will - also clean up the remember me cookie if it exists. - """ - - user = _get_user() - - if "_user_id" in session: - session.pop("_user_id") - - if "_fresh" in session: - session.pop("_fresh") - - if "_id" in session: - session.pop("_id") - - cookie_name = current_app.config.get("REMEMBER_COOKIE_NAME", COOKIE_NAME) - if cookie_name in request.cookies: - session["_remember"] = "clear" - if "_remember_seconds" in session: - session.pop("_remember_seconds") - - user_logged_out.send(current_app._get_current_object(), user=user) - - current_app.login_manager._update_request_context_with_user() - return True - - -def confirm_login(): - """ - This sets the current session as fresh. Sessions become stale when they - are reloaded from a cookie. - """ - session["_fresh"] = True - session["_id"] = current_app.login_manager._session_identifier_generator() - user_login_confirmed.send(current_app._get_current_object()) - - -def login_required(func): - """ - If you decorate a view with this, it will ensure that the current user is - logged in and authenticated before calling the actual view. (If they are - not, it calls the :attr:`LoginManager.unauthorized` callback.) For - example:: - - @app.route('/post') - @login_required - def post(): - pass - - If there are only certain times you need to require that your user is - logged in, you can do so with:: - - if not current_user.is_authenticated: - return current_app.login_manager.unauthorized() - - ...which is essentially the code that this function adds to your views. - - It can be convenient to globally turn off authentication when unit testing. - To enable this, if the application configuration variable `LOGIN_DISABLED` - is set to `True`, this decorator will be ignored. - - .. Note :: - - Per `W3 guidelines for CORS preflight requests - `_, - HTTP ``OPTIONS`` requests are exempt from login checks. - - :param func: The view function to decorate. 
- :type func: function - """ - - @wraps(func) - def decorated_view(*args, **kwargs): - if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"): - pass - elif not current_user.is_authenticated: - return current_app.login_manager.unauthorized() - - # flask 1.x compatibility - # current_app.ensure_sync is only available in Flask >= 2.0 - if callable(getattr(current_app, "ensure_sync", None)): - return current_app.ensure_sync(func)(*args, **kwargs) - return func(*args, **kwargs) - - return decorated_view - - -def fresh_login_required(func): - """ - If you decorate a view with this, it will ensure that the current user's - login is fresh - i.e. their session was not restored from a 'remember me' - cookie. Sensitive operations, like changing a password or e-mail, should - be protected with this, to impede the efforts of cookie thieves. - - If the user is not authenticated, :meth:`LoginManager.unauthorized` is - called as normal. If they are authenticated, but their session is not - fresh, it will call :meth:`LoginManager.needs_refresh` instead. (In that - case, you will need to provide a :attr:`LoginManager.refresh_view`.) - - Behaves identically to the :func:`login_required` decorator with respect - to configuration variables. - - .. Note :: - - Per `W3 guidelines for CORS preflight requests - `_, - HTTP ``OPTIONS`` requests are exempt from login checks. - - :param func: The view function to decorate. - :type func: function - """ - - @wraps(func) - def decorated_view(*args, **kwargs): - if request.method in EXEMPT_METHODS or current_app.config.get("LOGIN_DISABLED"): - pass - elif not current_user.is_authenticated: - return current_app.login_manager.unauthorized() - elif not login_fresh(): - return current_app.login_manager.needs_refresh() - try: - # current_app.ensure_sync available in Flask >= 2.0 - return current_app.ensure_sync(func)(*args, **kwargs) - except AttributeError: # pragma: no cover - return func(*args, **kwargs) - - return decorated_view - - -def set_login_view(login_view, blueprint=None): - """ - Sets the login view for the app or blueprint. If a blueprint is passed, - the login view is set for this blueprint on ``blueprint_login_views``. - - :param login_view: The user object to log in. - :type login_view: str - :param blueprint: The blueprint which this login view should be set on. - Defaults to ``None``. - :type blueprint: object - """ - - num_login_views = len(current_app.login_manager.blueprint_login_views) - if blueprint is not None or num_login_views != 0: - (current_app.login_manager.blueprint_login_views[blueprint.name]) = login_view - - if ( - current_app.login_manager.login_view is not None - and None not in current_app.login_manager.blueprint_login_views - ): - ( - current_app.login_manager.blueprint_login_views[None] - ) = current_app.login_manager.login_view - - current_app.login_manager.login_view = None - else: - current_app.login_manager.login_view = login_view - - -def _get_user(): - if has_request_context(): - if "_login_user" not in g: - current_app.login_manager._load_user() - - return g._login_user - - return None - - -def _cookie_digest(payload, key=None): - key = _secret_key(key) - - return hmac.new(key, payload.encode("utf-8"), sha512).hexdigest() - - -def _get_remote_addr(): - address = request.headers.get("X-Forwarded-For", request.remote_addr) - if address is not None: - # An 'X-Forwarded-For' header includes a comma separated list of the - # addresses, the first address being the actual remote address. 
- address = address.encode("utf-8").split(b",")[0].strip() - return address - - -def _create_identifier(): - user_agent = request.headers.get("User-Agent") - if user_agent is not None: - user_agent = user_agent.encode("utf-8") - base = f"{_get_remote_addr()}|{user_agent}" - if str is bytes: - base = str(base, "utf-8", errors="replace") # pragma: no cover - h = sha512() - h.update(base.encode("utf8")) - return h.hexdigest() - - -def _user_context_processor(): - return dict(current_user=_get_user()) - - -def _secret_key(key=None): - if key is None: - key = current_app.config["SECRET_KEY"] - - if isinstance(key, str): # pragma: no cover - key = key.encode("latin1") # ensure bytes - - return key diff --git a/myenv/Lib/site-packages/flask_migrate/__init__.py b/myenv/Lib/site-packages/flask_migrate/__init__.py deleted file mode 100644 index 197f17b..0000000 --- a/myenv/Lib/site-packages/flask_migrate/__init__.py +++ /dev/null @@ -1,266 +0,0 @@ -import argparse -from functools import wraps -import logging -import os -import sys -from flask import current_app, g -from alembic import __version__ as __alembic_version__ -from alembic.config import Config as AlembicConfig -from alembic import command -from alembic.util import CommandError - -alembic_version = tuple([int(v) for v in __alembic_version__.split('.')[0:3]]) -log = logging.getLogger(__name__) - - -class _MigrateConfig(object): - def __init__(self, migrate, db, **kwargs): - self.migrate = migrate - self.db = db - self.directory = migrate.directory - self.configure_args = kwargs - - @property - def metadata(self): - """ - Backwards compatibility, in old releases app.extensions['migrate'] - was set to db, and env.py accessed app.extensions['migrate'].metadata - """ - return self.db.metadata - - -class Config(AlembicConfig): - def __init__(self, *args, **kwargs): - self.template_directory = kwargs.pop('template_directory', None) - super().__init__(*args, **kwargs) - - def get_template_directory(self): - if self.template_directory: - return self.template_directory - package_dir = os.path.abspath(os.path.dirname(__file__)) - return os.path.join(package_dir, 'templates') - - -class Migrate(object): - def __init__(self, app=None, db=None, directory='migrations', command='db', - compare_type=True, render_as_batch=True, **kwargs): - self.configure_callbacks = [] - self.db = db - self.command = command - self.directory = str(directory) - self.alembic_ctx_kwargs = kwargs - self.alembic_ctx_kwargs['compare_type'] = compare_type - self.alembic_ctx_kwargs['render_as_batch'] = render_as_batch - if app is not None and db is not None: - self.init_app(app, db, directory) - - def init_app(self, app, db=None, directory=None, command=None, - compare_type=None, render_as_batch=None, **kwargs): - self.db = db or self.db - self.command = command or self.command - self.directory = str(directory or self.directory) - self.alembic_ctx_kwargs.update(kwargs) - if compare_type is not None: - self.alembic_ctx_kwargs['compare_type'] = compare_type - if render_as_batch is not None: - self.alembic_ctx_kwargs['render_as_batch'] = render_as_batch - if not hasattr(app, 'extensions'): - app.extensions = {} - app.extensions['migrate'] = _MigrateConfig( - self, self.db, **self.alembic_ctx_kwargs) - - from flask_migrate.cli import db as db_cli_group - app.cli.add_command(db_cli_group, name=self.command) - - def configure(self, f): - self.configure_callbacks.append(f) - return f - - def call_configure_callbacks(self, config): - for f in self.configure_callbacks: - config = f(config) 
- return config - - def get_config(self, directory=None, x_arg=None, opts=None): - if directory is None: - directory = self.directory - directory = str(directory) - config = Config(os.path.join(directory, 'alembic.ini')) - config.set_main_option('script_location', directory) - if config.cmd_opts is None: - config.cmd_opts = argparse.Namespace() - for opt in opts or []: - setattr(config.cmd_opts, opt, True) - if not hasattr(config.cmd_opts, 'x'): - setattr(config.cmd_opts, 'x', []) - for x in getattr(g, 'x_arg', []): - config.cmd_opts.x.append(x) - if x_arg is not None: - if isinstance(x_arg, list) or isinstance(x_arg, tuple): - for x in x_arg: - config.cmd_opts.x.append(x) - else: - config.cmd_opts.x.append(x_arg) - return self.call_configure_callbacks(config) - - -def catch_errors(f): - @wraps(f) - def wrapped(*args, **kwargs): - try: - f(*args, **kwargs) - except (CommandError, RuntimeError) as exc: - log.error('Error: ' + str(exc)) - sys.exit(1) - return wrapped - - -@catch_errors -def list_templates(): - """List available templates.""" - config = Config() - config.print_stdout("Available templates:\n") - for tempname in sorted(os.listdir(config.get_template_directory())): - with open( - os.path.join(config.get_template_directory(), tempname, "README") - ) as readme: - synopsis = next(readme).strip() - config.print_stdout("%s - %s", tempname, synopsis) - - -@catch_errors -def init(directory=None, multidb=False, template=None, package=False): - """Creates a new migration repository""" - if directory is None: - directory = current_app.extensions['migrate'].directory - template_directory = None - if template is not None and ('/' in template or '\\' in template): - template_directory, template = os.path.split(template) - config = Config(template_directory=template_directory) - config.set_main_option('script_location', directory) - config.config_file_name = os.path.join(directory, 'alembic.ini') - config = current_app.extensions['migrate'].\ - migrate.call_configure_callbacks(config) - if multidb and template is None: - template = 'flask-multidb' - elif template is None: - template = 'flask' - command.init(config, directory, template=template, package=package) - - -@catch_errors -def revision(directory=None, message=None, autogenerate=False, sql=False, - head='head', splice=False, branch_label=None, version_path=None, - rev_id=None): - """Create a new revision file.""" - opts = ['autogenerate'] if autogenerate else None - config = current_app.extensions['migrate'].migrate.get_config( - directory, opts=opts) - command.revision(config, message, autogenerate=autogenerate, sql=sql, - head=head, splice=splice, branch_label=branch_label, - version_path=version_path, rev_id=rev_id) - - -@catch_errors -def migrate(directory=None, message=None, sql=False, head='head', splice=False, - branch_label=None, version_path=None, rev_id=None, x_arg=None): - """Alias for 'revision --autogenerate'""" - config = current_app.extensions['migrate'].migrate.get_config( - directory, opts=['autogenerate'], x_arg=x_arg) - command.revision(config, message, autogenerate=True, sql=sql, - head=head, splice=splice, branch_label=branch_label, - version_path=version_path, rev_id=rev_id) - - -@catch_errors -def edit(directory=None, revision='current'): - """Edit current revision.""" - if alembic_version >= (0, 8, 0): - config = current_app.extensions['migrate'].migrate.get_config( - directory) - command.edit(config, revision) - else: - raise RuntimeError('Alembic 0.8.0 or greater is required') - - -@catch_errors -def 
merge(directory=None, revisions='', message=None, branch_label=None, - rev_id=None): - """Merge two revisions together. Creates a new migration file""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - command.merge(config, revisions, message=message, - branch_label=branch_label, rev_id=rev_id) - - -@catch_errors -def upgrade(directory=None, revision='head', sql=False, tag=None, x_arg=None): - """Upgrade to a later version""" - config = current_app.extensions['migrate'].migrate.get_config(directory, - x_arg=x_arg) - command.upgrade(config, revision, sql=sql, tag=tag) - - -@catch_errors -def downgrade(directory=None, revision='-1', sql=False, tag=None, x_arg=None): - """Revert to a previous version""" - config = current_app.extensions['migrate'].migrate.get_config(directory, - x_arg=x_arg) - if sql and revision == '-1': - revision = 'head:-1' - command.downgrade(config, revision, sql=sql, tag=tag) - - -@catch_errors -def show(directory=None, revision='head'): - """Show the revision denoted by the given symbol.""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - command.show(config, revision) - - -@catch_errors -def history(directory=None, rev_range=None, verbose=False, - indicate_current=False): - """List changeset scripts in chronological order.""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - if alembic_version >= (0, 9, 9): - command.history(config, rev_range, verbose=verbose, - indicate_current=indicate_current) - else: - command.history(config, rev_range, verbose=verbose) - - -@catch_errors -def heads(directory=None, verbose=False, resolve_dependencies=False): - """Show current available heads in the script directory""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - command.heads(config, verbose=verbose, - resolve_dependencies=resolve_dependencies) - - -@catch_errors -def branches(directory=None, verbose=False): - """Show current branch points""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - command.branches(config, verbose=verbose) - - -@catch_errors -def current(directory=None, verbose=False): - """Display the current revision for each database.""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - command.current(config, verbose=verbose) - - -@catch_errors -def stamp(directory=None, revision='head', sql=False, tag=None, purge=False): - """'stamp' the revision table with the given revision; don't run any - migrations""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - command.stamp(config, revision, sql=sql, tag=tag, purge=purge) - - -@catch_errors -def check(directory=None): - """Check if there are any new operations to migrate""" - config = current_app.extensions['migrate'].migrate.get_config(directory) - command.check(config) diff --git a/myenv/Lib/site-packages/flask_migrate/cli.py b/myenv/Lib/site-packages/flask_migrate/cli.py deleted file mode 100644 index 73a31ba..0000000 --- a/myenv/Lib/site-packages/flask_migrate/cli.py +++ /dev/null @@ -1,258 +0,0 @@ -import click -from flask import g -from flask.cli import with_appcontext -from flask_migrate import list_templates as _list_templates -from flask_migrate import init as _init -from flask_migrate import revision as _revision -from flask_migrate import migrate as _migrate -from flask_migrate import edit as _edit -from flask_migrate import merge as _merge -from flask_migrate import upgrade as _upgrade -from flask_migrate import 
downgrade as _downgrade -from flask_migrate import show as _show -from flask_migrate import history as _history -from flask_migrate import heads as _heads -from flask_migrate import branches as _branches -from flask_migrate import current as _current -from flask_migrate import stamp as _stamp -from flask_migrate import check as _check - - -@click.group() -@click.option('-x', '--x-arg', multiple=True, - help='Additional arguments consumed by custom env.py scripts') -@with_appcontext -def db(x_arg): - """Perform database migrations.""" - g.x_arg = x_arg # these will be picked up by Migrate.get_config() - - -@db.command() -@with_appcontext -def list_templates(): - """List available templates.""" - _list_templates() - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('--multidb', is_flag=True, - help=('Support multiple databases')) -@click.option('-t', '--template', default=None, - help=('Repository template to use (default is "flask")')) -@click.option('--package', is_flag=True, - help=('Write empty __init__.py files to the environment and ' - 'version locations')) -@with_appcontext -def init(directory, multidb, template, package): - """Creates a new migration repository.""" - _init(directory, multidb, template, package) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('-m', '--message', default=None, help='Revision message') -@click.option('--autogenerate', is_flag=True, - help=('Populate revision script with candidate migration ' - 'operations, based on comparison of database to model')) -@click.option('--sql', is_flag=True, - help=('Don\'t emit SQL to database - dump to standard output ' - 'instead')) -@click.option('--head', default='head', - help=('Specify head revision or @head to base new ' - 'revision on')) -@click.option('--splice', is_flag=True, - help=('Allow a non-head revision as the "head" to splice onto')) -@click.option('--branch-label', default=None, - help=('Specify a branch label to apply to the new revision')) -@click.option('--version-path', default=None, - help=('Specify specific path from config for version file')) -@click.option('--rev-id', default=None, - help=('Specify a hardcoded revision id instead of generating ' - 'one')) -@with_appcontext -def revision(directory, message, autogenerate, sql, head, splice, branch_label, - version_path, rev_id): - """Create a new revision file.""" - _revision(directory, message, autogenerate, sql, head, splice, - branch_label, version_path, rev_id) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('-m', '--message', default=None, help='Revision message') -@click.option('--sql', is_flag=True, - help=('Don\'t emit SQL to database - dump to standard output ' - 'instead')) -@click.option('--head', default='head', - help=('Specify head revision or @head to base new ' - 'revision on')) -@click.option('--splice', is_flag=True, - help=('Allow a non-head revision as the "head" to splice onto')) -@click.option('--branch-label', default=None, - help=('Specify a branch label to apply to the new revision')) -@click.option('--version-path', default=None, - help=('Specify specific path from config for version file')) -@click.option('--rev-id', default=None, - help=('Specify a hardcoded revision id instead of generating ' - 'one')) -@click.option('-x', 
'--x-arg', multiple=True, - help='Additional arguments consumed by custom env.py scripts') -@with_appcontext -def migrate(directory, message, sql, head, splice, branch_label, version_path, - rev_id, x_arg): - """Autogenerate a new revision file (Alias for - 'revision --autogenerate')""" - _migrate(directory, message, sql, head, splice, branch_label, version_path, - rev_id, x_arg) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.argument('revision', default='head') -@with_appcontext -def edit(directory, revision): - """Edit a revision file""" - _edit(directory, revision) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('-m', '--message', default=None, help='Merge revision message') -@click.option('--branch-label', default=None, - help=('Specify a branch label to apply to the new revision')) -@click.option('--rev-id', default=None, - help=('Specify a hardcoded revision id instead of generating ' - 'one')) -@click.argument('revisions', nargs=-1) -@with_appcontext -def merge(directory, message, branch_label, rev_id, revisions): - """Merge two revisions together, creating a new revision file""" - _merge(directory, revisions, message, branch_label, rev_id) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('--sql', is_flag=True, - help=('Don\'t emit SQL to database - dump to standard output ' - 'instead')) -@click.option('--tag', default=None, - help=('Arbitrary "tag" name - can be used by custom env.py ' - 'scripts')) -@click.option('-x', '--x-arg', multiple=True, - help='Additional arguments consumed by custom env.py scripts') -@click.argument('revision', default='head') -@with_appcontext -def upgrade(directory, sql, tag, x_arg, revision): - """Upgrade to a later version""" - _upgrade(directory, revision, sql, tag, x_arg) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('--sql', is_flag=True, - help=('Don\'t emit SQL to database - dump to standard output ' - 'instead')) -@click.option('--tag', default=None, - help=('Arbitrary "tag" name - can be used by custom env.py ' - 'scripts')) -@click.option('-x', '--x-arg', multiple=True, - help='Additional arguments consumed by custom env.py scripts') -@click.argument('revision', default='-1') -@with_appcontext -def downgrade(directory, sql, tag, x_arg, revision): - """Revert to a previous version""" - _downgrade(directory, revision, sql, tag, x_arg) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.argument('revision', default='head') -@with_appcontext -def show(directory, revision): - """Show the revision denoted by the given symbol.""" - _show(directory, revision) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('-r', '--rev-range', default=None, - help='Specify a revision range; format is [start]:[end]') -@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output') -@click.option('-i', '--indicate-current', is_flag=True, - help=('Indicate current version (Alembic 0.9.9 or greater is ' - 'required)')) -@with_appcontext -def history(directory, rev_range, 
verbose, indicate_current): - """List changeset scripts in chronological order.""" - _history(directory, rev_range, verbose, indicate_current) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output') -@click.option('--resolve-dependencies', is_flag=True, - help='Treat dependency versions as down revisions') -@with_appcontext -def heads(directory, verbose, resolve_dependencies): - """Show current available heads in the script directory""" - _heads(directory, verbose, resolve_dependencies) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output') -@with_appcontext -def branches(directory, verbose): - """Show current branch points""" - _branches(directory, verbose) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('-v', '--verbose', is_flag=True, help='Use more verbose output') -@with_appcontext -def current(directory, verbose): - """Display the current revision for each database.""" - _current(directory, verbose) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@click.option('--sql', is_flag=True, - help=('Don\'t emit SQL to database - dump to standard output ' - 'instead')) -@click.option('--tag', default=None, - help=('Arbitrary "tag" name - can be used by custom env.py ' - 'scripts')) -@click.option('--purge', is_flag=True, - help=('Delete the version in the alembic_version table before ' - 'stamping')) -@click.argument('revision', default='head') -@with_appcontext -def stamp(directory, sql, tag, revision, purge): - """'stamp' the revision table with the given revision; don't run any - migrations""" - _stamp(directory, revision, sql, tag, purge) - - -@db.command() -@click.option('-d', '--directory', default=None, - help=('Migration script directory (default is "migrations")')) -@with_appcontext -def check(directory): - """Check if there are any new operations to migrate""" - _check(directory) diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/README b/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/README deleted file mode 100644 index 02cce84..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/README +++ /dev/null @@ -1 +0,0 @@ -Multi-database configuration for aioflask. diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/alembic.ini.mako b/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/alembic.ini.mako deleted file mode 100644 index ec9d45c..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/alembic.ini.mako +++ /dev/null @@ -1,50 +0,0 @@ -# A generic, single database configuration. 
- -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic,flask_migrate - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[logger_flask_migrate] -level = INFO -handlers = -qualname = flask_migrate - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/env.py b/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/env.py deleted file mode 100644 index f14bd6c..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/env.py +++ /dev/null @@ -1,202 +0,0 @@ -import asyncio -import logging -from logging.config import fileConfig - -from sqlalchemy import MetaData -from flask import current_app - -from alembic import context - -USE_TWOPHASE = False - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - - -def get_engine(bind_key=None): - try: - # this works with Flask-SQLAlchemy<3 and Alchemical - return current_app.extensions['migrate'].db.get_engine(bind=bind_key) - except (TypeError, AttributeError): - # this works with Flask-SQLAlchemy>=3 - return current_app.extensions['migrate'].db.engines.get(bind_key) - - -def get_engine_url(bind_key=None): - try: - return get_engine(bind_key).url.render_as_string( - hide_password=False).replace('%', '%%') - except AttributeError: - return str(get_engine(bind_key).url).replace('%', '%%') - - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -config.set_main_option('sqlalchemy.url', get_engine_url()) -bind_names = [] -if current_app.config.get('SQLALCHEMY_BINDS') is not None: - bind_names = list(current_app.config['SQLALCHEMY_BINDS'].keys()) -else: - get_bind_names = getattr(current_app.extensions['migrate'].db, - 'bind_names', None) - if get_bind_names: - bind_names = get_bind_names() -for bind in bind_names: - context.config.set_section_option( - bind, "sqlalchemy.url", get_engine_url(bind_key=bind)) -target_db = current_app.extensions['migrate'].db - - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def get_metadata(bind): - """Return the metadata for a bind.""" - if bind == '': - bind = None - if hasattr(target_db, 'metadatas'): - return target_db.metadatas[bind] - - # legacy, less flexible implementation - m = MetaData() - for t in target_db.metadata.tables.values(): - if t.info.get('bind_key') == bind: - t.tometadata(m) - return m - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. 
- - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - # for the --sql use case, run migrations for each URL into - # individual files. - - engines = { - '': { - 'url': context.config.get_main_option('sqlalchemy.url') - } - } - for name in bind_names: - engines[name] = rec = {} - rec['url'] = context.config.get_section_option(name, "sqlalchemy.url") - - for name, rec in engines.items(): - logger.info("Migrating database %s" % (name or '')) - file_ = "%s.sql" % name - logger.info("Writing output to %s" % file_) - with open(file_, 'w') as buffer: - context.configure( - url=rec['url'], - output_buffer=buffer, - target_metadata=get_metadata(name), - literal_binds=True, - ) - with context.begin_transaction(): - context.run_migrations(engine_name=name) - - -def do_run_migrations(_, engines): - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if len(script.upgrade_ops_list) >= len(bind_names) + 1: - empty = True - for upgrade_ops in script.upgrade_ops_list: - if not upgrade_ops.is_empty(): - empty = False - if empty: - directives[:] = [] - logger.info('No changes in schema detected.') - - conf_args = current_app.extensions['migrate'].configure_args - if conf_args.get("process_revision_directives") is None: - conf_args["process_revision_directives"] = process_revision_directives - - for name, rec in engines.items(): - rec['sync_connection'] = conn = rec['connection']._sync_connection() - if USE_TWOPHASE: - rec['transaction'] = conn.begin_twophase() - else: - rec['transaction'] = conn.begin() - - try: - for name, rec in engines.items(): - logger.info("Migrating database %s" % (name or '')) - context.configure( - connection=rec['sync_connection'], - upgrade_token="%s_upgrades" % name, - downgrade_token="%s_downgrades" % name, - target_metadata=get_metadata(name), - **conf_args - ) - context.run_migrations(engine_name=name) - - if USE_TWOPHASE: - for rec in engines.values(): - rec['transaction'].prepare() - - for rec in engines.values(): - rec['transaction'].commit() - except: # noqa: E722 - for rec in engines.values(): - rec['transaction'].rollback() - raise - finally: - for rec in engines.values(): - rec['sync_connection'].close() - - -async def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - # for the direct-to-DB use case, start a transaction on all - # engines, then run all migrations, then commit all transactions. 
- engines = { - '': {'engine': get_engine()} - } - for name in bind_names: - engines[name] = rec = {} - rec['engine'] = get_engine(bind_key=name) - - for name, rec in engines.items(): - engine = rec['engine'] - rec['connection'] = await engine.connect().start() - - await engines['']['connection'].run_sync(do_run_migrations, engines) - - for rec in engines.values(): - await rec['connection'].close() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - asyncio.get_event_loop().run_until_complete(run_migrations_online()) diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/script.py.mako b/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/script.py.mako deleted file mode 100644 index 3beabc4..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask-multidb/script.py.mako +++ /dev/null @@ -1,53 +0,0 @@ -<%! -import re - -%>"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(engine_name): - globals()["upgrade_%s" % engine_name]() - - -def downgrade(engine_name): - globals()["downgrade_%s" % engine_name]() - -<% - from flask import current_app - bind_names = [] - if current_app.config.get('SQLALCHEMY_BINDS') is not None: - bind_names = list(current_app.config['SQLALCHEMY_BINDS'].keys()) - else: - get_bind_names = getattr(current_app.extensions['migrate'].db, 'bind_names', None) - if get_bind_names: - bind_names = get_bind_names() - db_names = [''] + bind_names -%> - -## generate an "upgrade_() / downgrade_()" function -## for each database name in the ini file. - -% for db_name in db_names: - -def upgrade_${db_name}(): - ${context.get("%s_upgrades" % db_name, "pass")} - - -def downgrade_${db_name}(): - ${context.get("%s_downgrades" % db_name, "pass")} - -% endfor diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/README b/myenv/Lib/site-packages/flask_migrate/templates/aioflask/README deleted file mode 100644 index 6ed8020..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/README +++ /dev/null @@ -1 +0,0 @@ -Single-database configuration for aioflask. diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/alembic.ini.mako b/myenv/Lib/site-packages/flask_migrate/templates/aioflask/alembic.ini.mako deleted file mode 100644 index ec9d45c..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/alembic.ini.mako +++ /dev/null @@ -1,50 +0,0 @@ -# A generic, single database configuration. 
- -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic,flask_migrate - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[logger_flask_migrate] -level = INFO -handlers = -qualname = flask_migrate - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/env.py b/myenv/Lib/site-packages/flask_migrate/templates/aioflask/env.py deleted file mode 100644 index 3a1ece5..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/env.py +++ /dev/null @@ -1,118 +0,0 @@ -import asyncio -import logging -from logging.config import fileConfig - -from flask import current_app - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - - -def get_engine(): - try: - # this works with Flask-SQLAlchemy<3 and Alchemical - return current_app.extensions['migrate'].db.get_engine() - except (TypeError, AttributeError): - # this works with Flask-SQLAlchemy>=3 - return current_app.extensions['migrate'].db.engine - - -def get_engine_url(): - try: - return get_engine().url.render_as_string(hide_password=False).replace( - '%', '%%') - except AttributeError: - return str(get_engine().url).replace('%', '%%') - - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -config.set_main_option('sqlalchemy.url', get_engine_url()) -target_db = current_app.extensions['migrate'].db - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def get_metadata(): - if hasattr(target_db, 'metadatas'): - return target_db.metadatas[None] - return target_db.metadata - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. 
- - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=get_metadata(), literal_binds=True - ) - - with context.begin_transaction(): - context.run_migrations() - - -def do_run_migrations(connection): - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if script.upgrade_ops.is_empty(): - directives[:] = [] - logger.info('No changes in schema detected.') - - conf_args = current_app.extensions['migrate'].configure_args - if conf_args.get("process_revision_directives") is None: - conf_args["process_revision_directives"] = process_revision_directives - - context.configure( - connection=connection, - target_metadata=get_metadata(), - **conf_args - ) - - with context.begin_transaction(): - context.run_migrations() - - -async def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - connectable = get_engine() - - async with connectable.connect() as connection: - await connection.run_sync(do_run_migrations) - - -if context.is_offline_mode(): - run_migrations_offline() -else: - asyncio.get_event_loop().run_until_complete(run_migrations_online()) diff --git a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/script.py.mako b/myenv/Lib/site-packages/flask_migrate/templates/aioflask/script.py.mako deleted file mode 100644 index 2c01563..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/aioflask/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/README b/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/README deleted file mode 100644 index eaae251..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/README +++ /dev/null @@ -1 +0,0 @@ -Multi-database configuration for Flask. diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/alembic.ini.mako b/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/alembic.ini.mako deleted file mode 100644 index ec9d45c..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/alembic.ini.mako +++ /dev/null @@ -1,50 +0,0 @@ -# A generic, single database configuration. 
- -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic,flask_migrate - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[logger_flask_migrate] -level = INFO -handlers = -qualname = flask_migrate - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/env.py b/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/env.py deleted file mode 100644 index 31b8e72..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/env.py +++ /dev/null @@ -1,191 +0,0 @@ -import logging -from logging.config import fileConfig - -from sqlalchemy import MetaData -from flask import current_app - -from alembic import context - -USE_TWOPHASE = False - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. -fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - - -def get_engine(bind_key=None): - try: - # this works with Flask-SQLAlchemy<3 and Alchemical - return current_app.extensions['migrate'].db.get_engine(bind=bind_key) - except (TypeError, AttributeError): - # this works with Flask-SQLAlchemy>=3 - return current_app.extensions['migrate'].db.engines.get(bind_key) - - -def get_engine_url(bind_key=None): - try: - return get_engine(bind_key).url.render_as_string( - hide_password=False).replace('%', '%%') - except AttributeError: - return str(get_engine(bind_key).url).replace('%', '%%') - - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -config.set_main_option('sqlalchemy.url', get_engine_url()) -bind_names = [] -if current_app.config.get('SQLALCHEMY_BINDS') is not None: - bind_names = list(current_app.config['SQLALCHEMY_BINDS'].keys()) -else: - get_bind_names = getattr(current_app.extensions['migrate'].db, - 'bind_names', None) - if get_bind_names: - bind_names = get_bind_names() -for bind in bind_names: - context.config.set_section_option( - bind, "sqlalchemy.url", get_engine_url(bind_key=bind)) -target_db = current_app.extensions['migrate'].db - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def get_metadata(bind): - """Return the metadata for a bind.""" - if bind == '': - bind = None - if hasattr(target_db, 'metadatas'): - return target_db.metadatas[bind] - - # legacy, less flexible implementation - m = MetaData() - for t in target_db.metadata.tables.values(): - if t.info.get('bind_key') == bind: - t.tometadata(m) - return m - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. 
- - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - # for the --sql use case, run migrations for each URL into - # individual files. - - engines = { - '': { - 'url': context.config.get_main_option('sqlalchemy.url') - } - } - for name in bind_names: - engines[name] = rec = {} - rec['url'] = context.config.get_section_option(name, "sqlalchemy.url") - - for name, rec in engines.items(): - logger.info("Migrating database %s" % (name or '')) - file_ = "%s.sql" % name - logger.info("Writing output to %s" % file_) - with open(file_, 'w') as buffer: - context.configure( - url=rec['url'], - output_buffer=buffer, - target_metadata=get_metadata(name), - literal_binds=True, - ) - with context.begin_transaction(): - context.run_migrations(engine_name=name) - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if len(script.upgrade_ops_list) >= len(bind_names) + 1: - empty = True - for upgrade_ops in script.upgrade_ops_list: - if not upgrade_ops.is_empty(): - empty = False - if empty: - directives[:] = [] - logger.info('No changes in schema detected.') - - conf_args = current_app.extensions['migrate'].configure_args - if conf_args.get("process_revision_directives") is None: - conf_args["process_revision_directives"] = process_revision_directives - - # for the direct-to-DB use case, start a transaction on all - # engines, then run all migrations, then commit all transactions. - engines = { - '': {'engine': get_engine()} - } - for name in bind_names: - engines[name] = rec = {} - rec['engine'] = get_engine(bind_key=name) - - for name, rec in engines.items(): - engine = rec['engine'] - rec['connection'] = conn = engine.connect() - - if USE_TWOPHASE: - rec['transaction'] = conn.begin_twophase() - else: - rec['transaction'] = conn.begin() - - try: - for name, rec in engines.items(): - logger.info("Migrating database %s" % (name or '')) - context.configure( - connection=rec['connection'], - upgrade_token="%s_upgrades" % name, - downgrade_token="%s_downgrades" % name, - target_metadata=get_metadata(name), - **conf_args - ) - context.run_migrations(engine_name=name) - - if USE_TWOPHASE: - for rec in engines.values(): - rec['transaction'].prepare() - - for rec in engines.values(): - rec['transaction'].commit() - except: # noqa: E722 - for rec in engines.values(): - rec['transaction'].rollback() - raise - finally: - for rec in engines.values(): - rec['connection'].close() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/script.py.mako b/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/script.py.mako deleted file mode 100644 index 3beabc4..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask-multidb/script.py.mako +++ /dev/null @@ -1,53 +0,0 @@ -<%! 
-import re - -%>"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. -revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(engine_name): - globals()["upgrade_%s" % engine_name]() - - -def downgrade(engine_name): - globals()["downgrade_%s" % engine_name]() - -<% - from flask import current_app - bind_names = [] - if current_app.config.get('SQLALCHEMY_BINDS') is not None: - bind_names = list(current_app.config['SQLALCHEMY_BINDS'].keys()) - else: - get_bind_names = getattr(current_app.extensions['migrate'].db, 'bind_names', None) - if get_bind_names: - bind_names = get_bind_names() - db_names = [''] + bind_names -%> - -## generate an "upgrade_() / downgrade_()" function -## for each database name in the ini file. - -% for db_name in db_names: - -def upgrade_${db_name}(): - ${context.get("%s_upgrades" % db_name, "pass")} - - -def downgrade_${db_name}(): - ${context.get("%s_downgrades" % db_name, "pass")} - -% endfor diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask/README b/myenv/Lib/site-packages/flask_migrate/templates/flask/README deleted file mode 100644 index 0e04844..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask/README +++ /dev/null @@ -1 +0,0 @@ -Single-database configuration for Flask. diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask/alembic.ini.mako b/myenv/Lib/site-packages/flask_migrate/templates/flask/alembic.ini.mako deleted file mode 100644 index ec9d45c..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask/alembic.ini.mako +++ /dev/null @@ -1,50 +0,0 @@ -# A generic, single database configuration. - -[alembic] -# template used to generate migration files -# file_template = %%(rev)s_%%(slug)s - -# set to 'true' to run the environment during -# the 'revision' command, regardless of autogenerate -# revision_environment = false - - -# Logging configuration -[loggers] -keys = root,sqlalchemy,alembic,flask_migrate - -[handlers] -keys = console - -[formatters] -keys = generic - -[logger_root] -level = WARN -handlers = console -qualname = - -[logger_sqlalchemy] -level = WARN -handlers = -qualname = sqlalchemy.engine - -[logger_alembic] -level = INFO -handlers = -qualname = alembic - -[logger_flask_migrate] -level = INFO -handlers = -qualname = flask_migrate - -[handler_console] -class = StreamHandler -args = (sys.stderr,) -level = NOTSET -formatter = generic - -[formatter_generic] -format = %(levelname)-5.5s [%(name)s] %(message)s -datefmt = %H:%M:%S diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask/env.py b/myenv/Lib/site-packages/flask_migrate/templates/flask/env.py deleted file mode 100644 index 4c97092..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask/env.py +++ /dev/null @@ -1,113 +0,0 @@ -import logging -from logging.config import fileConfig - -from flask import current_app - -from alembic import context - -# this is the Alembic Config object, which provides -# access to the values within the .ini file in use. -config = context.config - -# Interpret the config file for Python logging. -# This line sets up loggers basically. 
-fileConfig(config.config_file_name) -logger = logging.getLogger('alembic.env') - - -def get_engine(): - try: - # this works with Flask-SQLAlchemy<3 and Alchemical - return current_app.extensions['migrate'].db.get_engine() - except (TypeError, AttributeError): - # this works with Flask-SQLAlchemy>=3 - return current_app.extensions['migrate'].db.engine - - -def get_engine_url(): - try: - return get_engine().url.render_as_string(hide_password=False).replace( - '%', '%%') - except AttributeError: - return str(get_engine().url).replace('%', '%%') - - -# add your model's MetaData object here -# for 'autogenerate' support -# from myapp import mymodel -# target_metadata = mymodel.Base.metadata -config.set_main_option('sqlalchemy.url', get_engine_url()) -target_db = current_app.extensions['migrate'].db - -# other values from the config, defined by the needs of env.py, -# can be acquired: -# my_important_option = config.get_main_option("my_important_option") -# ... etc. - - -def get_metadata(): - if hasattr(target_db, 'metadatas'): - return target_db.metadatas[None] - return target_db.metadata - - -def run_migrations_offline(): - """Run migrations in 'offline' mode. - - This configures the context with just a URL - and not an Engine, though an Engine is acceptable - here as well. By skipping the Engine creation - we don't even need a DBAPI to be available. - - Calls to context.execute() here emit the given string to the - script output. - - """ - url = config.get_main_option("sqlalchemy.url") - context.configure( - url=url, target_metadata=get_metadata(), literal_binds=True - ) - - with context.begin_transaction(): - context.run_migrations() - - -def run_migrations_online(): - """Run migrations in 'online' mode. - - In this scenario we need to create an Engine - and associate a connection with the context. - - """ - - # this callback is used to prevent an auto-migration from being generated - # when there are no changes to the schema - # reference: http://alembic.zzzcomputing.com/en/latest/cookbook.html - def process_revision_directives(context, revision, directives): - if getattr(config.cmd_opts, 'autogenerate', False): - script = directives[0] - if script.upgrade_ops.is_empty(): - directives[:] = [] - logger.info('No changes in schema detected.') - - conf_args = current_app.extensions['migrate'].configure_args - if conf_args.get("process_revision_directives") is None: - conf_args["process_revision_directives"] = process_revision_directives - - connectable = get_engine() - - with connectable.connect() as connection: - context.configure( - connection=connection, - target_metadata=get_metadata(), - **conf_args - ) - - with context.begin_transaction(): - context.run_migrations() - - -if context.is_offline_mode(): - run_migrations_offline() -else: - run_migrations_online() diff --git a/myenv/Lib/site-packages/flask_migrate/templates/flask/script.py.mako b/myenv/Lib/site-packages/flask_migrate/templates/flask/script.py.mako deleted file mode 100644 index 2c01563..0000000 --- a/myenv/Lib/site-packages/flask_migrate/templates/flask/script.py.mako +++ /dev/null @@ -1,24 +0,0 @@ -"""${message} - -Revision ID: ${up_revision} -Revises: ${down_revision | comma,n} -Create Date: ${create_date} - -""" -from alembic import op -import sqlalchemy as sa -${imports if imports else ""} - -# revision identifiers, used by Alembic. 
-revision = ${repr(up_revision)} -down_revision = ${repr(down_revision)} -branch_labels = ${repr(branch_labels)} -depends_on = ${repr(depends_on)} - - -def upgrade(): - ${upgrades if upgrades else "pass"} - - -def downgrade(): - ${downgrades if downgrades else "pass"} diff --git a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/INSTALLER b/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst b/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst deleted file mode 100644 index 9d227a0..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2010 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/METADATA b/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/METADATA deleted file mode 100644 index 92f239c..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/METADATA +++ /dev/null @@ -1,109 +0,0 @@ -Metadata-Version: 2.1 -Name: Flask-SQLAlchemy -Version: 3.1.1 -Summary: Add SQLAlchemy support to your Flask application. 
-Maintainer-email: Pallets -Requires-Python: >=3.8 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Requires-Dist: flask>=2.2.5 -Requires-Dist: sqlalchemy>=2.0.16 -Project-URL: Changes, https://flask-sqlalchemy.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://flask-sqlalchemy.palletsprojects.com -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Issue Tracker, https://github.com/pallets-eco/flask-sqlalchemy/issues/ -Project-URL: Source Code, https://github.com/pallets-eco/flask-sqlalchemy/ - -Flask-SQLAlchemy -================ - -Flask-SQLAlchemy is an extension for `Flask`_ that adds support for -`SQLAlchemy`_ to your application. It aims to simplify using SQLAlchemy -with Flask by providing useful defaults and extra helpers that make it -easier to accomplish common tasks. - -.. _Flask: https://palletsprojects.com/p/flask/ -.. _SQLAlchemy: https://www.sqlalchemy.org - - -Installing ----------- - -Install and update using `pip`_: - -.. code-block:: text - - $ pip install -U Flask-SQLAlchemy - -.. _pip: https://pip.pypa.io/en/stable/getting-started/ - - -A Simple Example ----------------- - -.. code-block:: python - - from flask import Flask - from flask_sqlalchemy import SQLAlchemy - from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column - - app = Flask(__name__) - app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.sqlite" - - class Base(DeclarativeBase): - pass - - db = SQLAlchemy(app, model_class=Base) - - class User(db.Model): - id: Mapped[int] = mapped_column(db.Integer, primary_key=True) - username: Mapped[str] = mapped_column(db.String, unique=True, nullable=False) - - with app.app_context(): - db.create_all() - - db.session.add(User(username="example")) - db.session.commit() - - users = db.session.execute(db.select(User)).scalars() - - -Contributing ------------- - -For guidance on setting up a development environment and how to make a -contribution to Flask-SQLAlchemy, see the `contributing guidelines`_. - -.. _contributing guidelines: https://github.com/pallets-eco/flask-sqlalchemy/blob/main/CONTRIBUTING.rst - - -Donate ------- - -The Pallets organization develops and supports Flask-SQLAlchemy and -other popular packages. In order to grow the community of contributors -and users, and allow the maintainers to devote more time to the -projects, `please donate today`_. - -.. 
_please donate today: https://palletsprojects.com/donate - - -Links ------ - -- Documentation: https://flask-sqlalchemy.palletsprojects.com/ -- Changes: https://flask-sqlalchemy.palletsprojects.com/changes/ -- PyPI Releases: https://pypi.org/project/Flask-SQLAlchemy/ -- Source Code: https://github.com/pallets-eco/flask-sqlalchemy/ -- Issue Tracker: https://github.com/pallets-eco/flask-sqlalchemy/issues/ -- Website: https://palletsprojects.com/ -- Twitter: https://twitter.com/PalletsTeam -- Chat: https://discord.gg/pallets - diff --git a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/RECORD b/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/RECORD deleted file mode 100644 index 95c47ac..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/RECORD +++ /dev/null @@ -1,27 +0,0 @@ -flask_sqlalchemy-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -flask_sqlalchemy-3.1.1.dist-info/LICENSE.rst,sha256=SJqOEQhQntmKN7uYPhHg9-HTHwvY-Zp5yESOf_N9B-o,1475 -flask_sqlalchemy-3.1.1.dist-info/METADATA,sha256=lBxR1akBt7n9XBjIVTL2OV52OhCfFrb-Mqtoe0DCbR8,3432 -flask_sqlalchemy-3.1.1.dist-info/RECORD,, -flask_sqlalchemy-3.1.1.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask_sqlalchemy-3.1.1.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -flask_sqlalchemy/__init__.py,sha256=he_w4qQQVS2Z1ms5GCTptDTXNOXBXw0n8zSuWCp8n6Y,653 -flask_sqlalchemy/__pycache__/__init__.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/cli.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/extension.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/model.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/pagination.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/query.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/record_queries.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/session.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/table.cpython-312.pyc,, -flask_sqlalchemy/__pycache__/track_modifications.cpython-312.pyc,, -flask_sqlalchemy/cli.py,sha256=pg3QDxP36GW2qnwe_CpPtkRhPchyVSGM6zlBNWuNCFE,484 -flask_sqlalchemy/extension.py,sha256=71tP_kNtb5VgZdafy_OH1sWdZOA6PaT7cJqX7tKgZ-k,38261 -flask_sqlalchemy/model.py,sha256=_mSisC2Eni0TgTyFWeN_O4LIexTeP_sVTdxh03yMK50,11461 -flask_sqlalchemy/pagination.py,sha256=JFpllrqkRkwacb8DAmQWaz9wsvQa0dypfSkhUDSC2ws,11119 -flask_sqlalchemy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -flask_sqlalchemy/query.py,sha256=Uls9qbmnpb9Vba43EDfsRP17eHJ0X4VG7SE22tH5R3g,3748 -flask_sqlalchemy/record_queries.py,sha256=ouS1ayj16h76LJprx13iYdoFZbm6m8OncrOgAVbG1Sk,3520 -flask_sqlalchemy/session.py,sha256=pBbtN8iDc8yuGVt0k18BvZHh2uEI7QPzZXO7eXrRi1g,3426 -flask_sqlalchemy/table.py,sha256=wAPOy8qwyAxpMwOIUJY4iMOultzz2W0D6xvBkQ7U2CE,859 -flask_sqlalchemy/track_modifications.py,sha256=yieyozj7IiVzwnAGZ-ZrgqrzjrUfG0kPrXBfW_hStSU,2755 diff --git a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/REQUESTED b/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/WHEEL b/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy-3.1.1.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/__init__.py 
b/myenv/Lib/site-packages/flask_sqlalchemy/__init__.py deleted file mode 100644 index c2fa059..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/__init__.py +++ /dev/null @@ -1,26 +0,0 @@ -from __future__ import annotations - -import typing as t - -from .extension import SQLAlchemy - -__all__ = [ - "SQLAlchemy", -] - - -def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " Flask-SQLAlchemy 3.2. Use feature detection or" - " 'importlib.metadata.version(\"flask-sqlalchemy\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("flask-sqlalchemy") - - raise AttributeError(name) diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/cli.py b/myenv/Lib/site-packages/flask_sqlalchemy/cli.py deleted file mode 100644 index d7d7e4b..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/cli.py +++ /dev/null @@ -1,16 +0,0 @@ -from __future__ import annotations - -import typing as t - -from flask import current_app - - -def add_models_to_shell() -> dict[str, t.Any]: - """Registered with :meth:`~flask.Flask.shell_context_processor` if - ``add_models_to_shell`` is enabled. Adds the ``db`` instance and all model classes - to ``flask shell``. - """ - db = current_app.extensions["sqlalchemy"] - out = {m.class_.__name__: m.class_ for m in db.Model._sa_registry.mappers} - out["db"] = db - return out diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/extension.py b/myenv/Lib/site-packages/flask_sqlalchemy/extension.py deleted file mode 100644 index 43e1b9a..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/extension.py +++ /dev/null @@ -1,1008 +0,0 @@ -from __future__ import annotations - -import os -import types -import typing as t -import warnings -from weakref import WeakKeyDictionary - -import sqlalchemy as sa -import sqlalchemy.event as sa_event -import sqlalchemy.exc as sa_exc -import sqlalchemy.orm as sa_orm -from flask import abort -from flask import current_app -from flask import Flask -from flask import has_app_context - -from .model import _QueryProperty -from .model import BindMixin -from .model import DefaultMeta -from .model import DefaultMetaNoName -from .model import Model -from .model import NameMixin -from .pagination import Pagination -from .pagination import SelectPagination -from .query import Query -from .session import _app_ctx_id -from .session import Session -from .table import _Table - -_O = t.TypeVar("_O", bound=object) # Based on sqlalchemy.orm._typing.py - - -# Type accepted for model_class argument -_FSA_MCT = t.TypeVar( - "_FSA_MCT", - bound=t.Union[ - t.Type[Model], - sa_orm.DeclarativeMeta, - t.Type[sa_orm.DeclarativeBase], - t.Type[sa_orm.DeclarativeBaseNoMeta], - ], -) - - -# Type returned by make_declarative_base -class _FSAModel(Model): - metadata: sa.MetaData - - -def _get_2x_declarative_bases( - model_class: _FSA_MCT, -) -> list[t.Type[t.Union[sa_orm.DeclarativeBase, sa_orm.DeclarativeBaseNoMeta]]]: - return [ - b - for b in model_class.__bases__ - if issubclass(b, (sa_orm.DeclarativeBase, sa_orm.DeclarativeBaseNoMeta)) - ] - - -class SQLAlchemy: - """Integrates SQLAlchemy with Flask. This handles setting up one or more engines, - associating tables and models with specific engines, and cleaning up connections and - sessions after each request. 
- - Only the engine configuration is specific to each application, other things like - the model, table, metadata, and session are shared for all applications using that - extension instance. Call :meth:`init_app` to configure the extension on an - application. - - After creating the extension, create model classes by subclassing :attr:`Model`, and - table classes with :attr:`Table`. These can be accessed before :meth:`init_app` is - called, making it possible to define the models separately from the application. - - Accessing :attr:`session` and :attr:`engine` requires an active Flask application - context. This includes methods like :meth:`create_all` which use the engine. - - This class also provides access to names in SQLAlchemy's ``sqlalchemy`` and - ``sqlalchemy.orm`` modules. For example, you can use ``db.Column`` and - ``db.relationship`` instead of importing ``sqlalchemy.Column`` and - ``sqlalchemy.orm.relationship``. This can be convenient when defining models. - - :param app: Call :meth:`init_app` on this Flask application now. - :param metadata: Use this as the default :class:`sqlalchemy.schema.MetaData`. Useful - for setting a naming convention. - :param session_options: Arguments used by :attr:`session` to create each session - instance. A ``scopefunc`` key will be passed to the scoped session, not the - session instance. See :class:`sqlalchemy.orm.sessionmaker` for a list of - arguments. - :param query_class: Use this as the default query class for models and dynamic - relationships. The query interface is considered legacy in SQLAlchemy. - :param model_class: Use this as the model base class when creating the declarative - model class :attr:`Model`. Can also be a fully created declarative model class - for further customization. - :param engine_options: Default arguments used when creating every engine. These are - lower precedence than application config. See :func:`sqlalchemy.create_engine` - for a list of arguments. - :param add_models_to_shell: Add the ``db`` instance and all model classes to - ``flask shell``. - - .. versionchanged:: 3.1.0 - The ``metadata`` parameter can still be used with SQLAlchemy 1.x classes, - but is ignored when using SQLAlchemy 2.x style of declarative classes. - Instead, specify metadata on your Base class. - - .. versionchanged:: 3.1.0 - Added the ``disable_autonaming`` parameter. - - .. versionchanged:: 3.1.0 - Changed ``model_class`` parameter to accept a SQLAlchemy 2.x - declarative base subclass. - - .. versionchanged:: 3.0 - An active Flask application context is always required to access ``session`` and - ``engine``. - - .. versionchanged:: 3.0 - Separate ``metadata`` are used for each bind key. - - .. versionchanged:: 3.0 - The ``engine_options`` parameter is applied as defaults before per-engine - configuration. - - .. versionchanged:: 3.0 - The session class can be customized in ``session_options``. - - .. versionchanged:: 3.0 - Added the ``add_models_to_shell`` parameter. - - .. versionchanged:: 3.0 - Engines are created when calling ``init_app`` rather than the first time they - are accessed. - - .. versionchanged:: 3.0 - All parameters except ``app`` are keyword-only. - - .. versionchanged:: 3.0 - The extension instance is stored directly as ``app.extensions["sqlalchemy"]``. - - .. versionchanged:: 3.0 - Setup methods are renamed with a leading underscore. They are considered - internal interfaces which may change at any time. - - .. versionchanged:: 3.0 - Removed the ``use_native_unicode`` parameter and config. - - ..
versionchanged:: 2.4 - Added the ``engine_options`` parameter. - - .. versionchanged:: 2.1 - Added the ``metadata``, ``query_class``, and ``model_class`` parameters. - - .. versionchanged:: 2.1 - Use the same query class across ``session``, ``Model.query`` and - ``Query``. - - .. versionchanged:: 0.16 - ``scopefunc`` is accepted in ``session_options``. - - .. versionchanged:: 0.10 - Added the ``session_options`` parameter. - """ - - def __init__( - self, - app: Flask | None = None, - *, - metadata: sa.MetaData | None = None, - session_options: dict[str, t.Any] | None = None, - query_class: type[Query] = Query, - model_class: _FSA_MCT = Model, # type: ignore[assignment] - engine_options: dict[str, t.Any] | None = None, - add_models_to_shell: bool = True, - disable_autonaming: bool = False, - ): - if session_options is None: - session_options = {} - - self.Query = query_class - """The default query class used by ``Model.query`` and ``lazy="dynamic"`` - relationships. - - .. warning:: - The query interface is considered legacy in SQLAlchemy. - - Customize this by passing the ``query_class`` parameter to the extension. - """ - - self.session = self._make_scoped_session(session_options) - """A :class:`sqlalchemy.orm.scoping.scoped_session` that creates instances of - :class:`.Session` scoped to the current Flask application context. The session - will be removed, returning the engine connection to the pool, when the - application context exits. - - Customize this by passing ``session_options`` to the extension. - - This requires that a Flask application context is active. - - .. versionchanged:: 3.0 - The session is scoped to the current app context. - """ - - self.metadatas: dict[str | None, sa.MetaData] = {} - """Map of bind keys to :class:`sqlalchemy.schema.MetaData` instances. The - ``None`` key refers to the default metadata, and is available as - :attr:`metadata`. - - Customize the default metadata by passing the ``metadata`` parameter to the - extension. This can be used to set a naming convention. When metadata for - another bind key is created, it copies the default's naming convention. - - .. versionadded:: 3.0 - """ - - if metadata is not None: - if len(_get_2x_declarative_bases(model_class)) > 0: - warnings.warn( - "When using SQLAlchemy 2.x style of declarative classes," - " the `metadata` should be an attribute of the base class." - " The metadata passed into SQLAlchemy() is ignored.", - DeprecationWarning, - stacklevel=2, - ) - else: - metadata.info["bind_key"] = None - self.metadatas[None] = metadata - - self.Table = self._make_table_class() - """A :class:`sqlalchemy.schema.Table` class that chooses a metadata - automatically. - - Unlike the base ``Table``, the ``metadata`` argument is not required. If it is - not given, it is selected based on the ``bind_key`` argument. - - :param bind_key: Used to select a different metadata. - :param args: Arguments passed to the base class. These are typically the table's - name, columns, and constraints. - :param kwargs: Arguments passed to the base class. - - .. versionchanged:: 3.0 - This is a subclass of SQLAlchemy's ``Table`` rather than a function. - """ - - self.Model = self._make_declarative_base( - model_class, disable_autonaming=disable_autonaming - ) - """A SQLAlchemy declarative model class. Subclass this to define database - models. - - If a model does not set ``__tablename__``, it will be generated by converting - the class name from ``CamelCase`` to ``snake_case``. It will not be generated
It will not be generated - if the model looks like it uses single-table inheritance. - - If a model or parent class sets ``__bind_key__``, it will use that metadata and - database engine. Otherwise, it will use the default :attr:`metadata` and - :attr:`engine`. This is ignored if the model sets ``metadata`` or ``__table__``. - - For code using the SQLAlchemy 1.x API, customize this model by subclassing - :class:`.Model` and passing the ``model_class`` parameter to the extension. - A fully created declarative model class can be - passed as well, to use a custom metaclass. - - For code using the SQLAlchemy 2.x API, customize this model by subclassing - :class:`sqlalchemy.orm.DeclarativeBase` or - :class:`sqlalchemy.orm.DeclarativeBaseNoMeta` - and passing the ``model_class`` parameter to the extension. - """ - - if engine_options is None: - engine_options = {} - - self._engine_options = engine_options - self._app_engines: WeakKeyDictionary[Flask, dict[str | None, sa.engine.Engine]] - self._app_engines = WeakKeyDictionary() - self._add_models_to_shell = add_models_to_shell - - if app is not None: - self.init_app(app) - - def __repr__(self) -> str: - if not has_app_context(): - return f"<{type(self).__name__}>" - - message = f"{type(self).__name__} {self.engine.url}" - - if len(self.engines) > 1: - message = f"{message} +{len(self.engines) - 1}" - - return f"<{message}>" - - def init_app(self, app: Flask) -> None: - """Initialize a Flask application for use with this extension instance. This - must be called before accessing the database engine or session with the app. - - This sets default configuration values, then configures the extension on the - application and creates the engines for each bind key. Therefore, this must be - called after the application has been configured. Changes to application config - after this call will not be reflected. - - The following keys from ``app.config`` are used: - - - :data:`.SQLALCHEMY_DATABASE_URI` - - :data:`.SQLALCHEMY_ENGINE_OPTIONS` - - :data:`.SQLALCHEMY_ECHO` - - :data:`.SQLALCHEMY_BINDS` - - :data:`.SQLALCHEMY_RECORD_QUERIES` - - :data:`.SQLALCHEMY_TRACK_MODIFICATIONS` - - :param app: The Flask application to initialize. - """ - if "sqlalchemy" in app.extensions: - raise RuntimeError( - "A 'SQLAlchemy' instance has already been registered on this Flask app." - " Import and use that instance instead." - ) - - app.extensions["sqlalchemy"] = self - app.teardown_appcontext(self._teardown_session) - - if self._add_models_to_shell: - from .cli import add_models_to_shell - - app.shell_context_processor(add_models_to_shell) - - basic_uri: str | sa.engine.URL | None = app.config.setdefault( - "SQLALCHEMY_DATABASE_URI", None - ) - basic_engine_options = self._engine_options.copy() - basic_engine_options.update( - app.config.setdefault("SQLALCHEMY_ENGINE_OPTIONS", {}) - ) - echo: bool = app.config.setdefault("SQLALCHEMY_ECHO", False) - config_binds: dict[ - str | None, str | sa.engine.URL | dict[str, t.Any] - ] = app.config.setdefault("SQLALCHEMY_BINDS", {}) - engine_options: dict[str | None, dict[str, t.Any]] = {} - - # Build the engine config for each bind key. - for key, value in config_binds.items(): - engine_options[key] = self._engine_options.copy() - - if isinstance(value, (str, sa.engine.URL)): - engine_options[key]["url"] = value - else: - engine_options[key].update(value) - - # Build the engine config for the default bind key. 
- if basic_uri is not None: - basic_engine_options["url"] = basic_uri - - if "url" in basic_engine_options: - engine_options.setdefault(None, {}).update(basic_engine_options) - - if not engine_options: - raise RuntimeError( - "Either 'SQLALCHEMY_DATABASE_URI' or 'SQLALCHEMY_BINDS' must be set." - ) - - engines = self._app_engines.setdefault(app, {}) - - # Dispose existing engines in case init_app is called again. - if engines: - for engine in engines.values(): - engine.dispose() - - engines.clear() - - # Create the metadata and engine for each bind key. - for key, options in engine_options.items(): - self._make_metadata(key) - options.setdefault("echo", echo) - options.setdefault("echo_pool", echo) - self._apply_driver_defaults(options, app) - engines[key] = self._make_engine(key, options, app) - - if app.config.setdefault("SQLALCHEMY_RECORD_QUERIES", False): - from . import record_queries - - for engine in engines.values(): - record_queries._listen(engine) - - if app.config.setdefault("SQLALCHEMY_TRACK_MODIFICATIONS", False): - from . import track_modifications - - track_modifications._listen(self.session) - - def _make_scoped_session( - self, options: dict[str, t.Any] - ) -> sa_orm.scoped_session[Session]: - """Create a :class:`sqlalchemy.orm.scoping.scoped_session` around the factory - from :meth:`_make_session_factory`. The result is available as :attr:`session`. - - The scope function can be customized using the ``scopefunc`` key in the - ``session_options`` parameter to the extension. By default it uses the id of the - current Flask application context. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param options: The ``session_options`` parameter from ``__init__``. Keyword - arguments passed to the session factory. A ``scopefunc`` key is popped. - - .. versionchanged:: 3.0 - The session is scoped to the current app context. - - .. versionchanged:: 3.0 - Renamed from ``create_scoped_session``, this method is internal. - """ - scope = options.pop("scopefunc", _app_ctx_id) - factory = self._make_session_factory(options) - return sa_orm.scoped_session(factory, scope) - - def _make_session_factory( - self, options: dict[str, t.Any] - ) -> sa_orm.sessionmaker[Session]: - """Create the SQLAlchemy :class:`sqlalchemy.orm.sessionmaker` used by - :meth:`_make_scoped_session`. - - To customize, pass the ``session_options`` parameter to :class:`SQLAlchemy`. To - customize the session class, subclass :class:`.Session` and pass it as the - ``class_`` key. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param options: The ``session_options`` parameter from ``__init__``. Keyword - arguments passed to the session factory. - - .. versionchanged:: 3.0 - The session class can be customized. - - .. versionchanged:: 3.0 - Renamed from ``create_session``, this method is internal. - """ - options.setdefault("class_", Session) - options.setdefault("query_cls", self.Query) - return sa_orm.sessionmaker(db=self, **options) - - def _teardown_session(self, exc: BaseException | None) -> None: - """Remove the current session at the end of the request. - - :meta private: - - .. versionadded:: 3.0 - """ - self.session.remove() - - def _make_metadata(self, bind_key: str | None) -> sa.MetaData: - """Get or create a :class:`sqlalchemy.schema.MetaData` for the given bind key. - - This method is used for internal setup. Its signature may change at any time.
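Editor's sketch (hypothetical app and model, not part of the deleted sources): the setup flow that init_app and the session factory above implement. SQLALCHEMY_DATABASE_URI fills the default (None) bind key, engines are built inside init_app, and session and engine access require an application context.

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()  # models can be declared before any app exists

class User(db.Model):  # __tablename__ autogenerates to "user"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String, nullable=False)

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///example.db"
db.init_app(app)  # engines are created here, not on first access

with app.app_context():  # session/engine need an active context
    db.create_all()
    db.session.add(User(name="alice"))
    db.session.commit()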
- - :meta private: - - :param bind_key: The name of the metadata being created. - - .. versionadded:: 3.0 - """ - if bind_key in self.metadatas: - return self.metadatas[bind_key] - - if bind_key is not None: - # Copy the naming convention from the default metadata. - naming_convention = self._make_metadata(None).naming_convention - else: - naming_convention = None - - # Set the bind key in info to be used by session.get_bind. - metadata = sa.MetaData( - naming_convention=naming_convention, info={"bind_key": bind_key} - ) - self.metadatas[bind_key] = metadata - return metadata - - def _make_table_class(self) -> type[_Table]: - """Create a SQLAlchemy :class:`sqlalchemy.schema.Table` class that chooses a - metadata automatically based on the ``bind_key``. The result is available as - :attr:`Table`. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - .. versionadded:: 3.0 - """ - - class Table(_Table): - def __new__( - cls, *args: t.Any, bind_key: str | None = None, **kwargs: t.Any - ) -> Table: - # If a metadata arg is passed, go directly to the base Table. Also do - # this for no args so the correct error is shown. - if not args or (len(args) >= 2 and isinstance(args[1], sa.MetaData)): - return super().__new__(cls, *args, **kwargs) - - metadata = self._make_metadata(bind_key) - return super().__new__(cls, *[args[0], metadata, *args[1:]], **kwargs) - - return Table - - def _make_declarative_base( - self, - model_class: _FSA_MCT, - disable_autonaming: bool = False, - ) -> t.Type[_FSAModel]: - """Create a SQLAlchemy declarative model class. The result is available as - :attr:`Model`. - - To customize, subclass :class:`.Model` and pass it as ``model_class`` to - :class:`SQLAlchemy`. To customize at the metaclass level, pass an already - created declarative model class as ``model_class``. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param model_class: A model base class, or an already created declarative model - class. - - :param disable_autonaming: Turns off automatic tablename generation in models. - - .. versionchanged:: 3.1.0 - Added support for passing SQLAlchemy 2.x base class as model class. - Added optional ``disable_autonaming`` parameter. - - .. versionchanged:: 3.0 - Renamed with a leading underscore, this method is internal. - - .. versionchanged:: 2.3 - ``model`` can be an already created declarative model class. - """ - model: t.Type[_FSAModel] - declarative_bases = _get_2x_declarative_bases(model_class) - if len(declarative_bases) > 1: - # raise error if more than one declarative base is found - raise ValueError( - "Only one declarative base can be passed to SQLAlchemy." 
- " Got: {}".format(model_class.__bases__) - ) - elif len(declarative_bases) == 1: - body = dict(model_class.__dict__) - body["__fsa__"] = self - mixin_classes = [BindMixin, NameMixin, Model] - if disable_autonaming: - mixin_classes.remove(NameMixin) - model = types.new_class( - "FlaskSQLAlchemyBase", - (*mixin_classes, *model_class.__bases__), - {"metaclass": type(declarative_bases[0])}, - lambda ns: ns.update(body), - ) - elif not isinstance(model_class, sa_orm.DeclarativeMeta): - metadata = self._make_metadata(None) - metaclass = DefaultMetaNoName if disable_autonaming else DefaultMeta - model = sa_orm.declarative_base( - metadata=metadata, cls=model_class, name="Model", metaclass=metaclass - ) - else: - model = model_class # type: ignore[assignment] - - if None not in self.metadatas: - # Use the model's metadata as the default metadata. - model.metadata.info["bind_key"] = None - self.metadatas[None] = model.metadata - else: - # Use the passed in default metadata as the model's metadata. - model.metadata = self.metadatas[None] - - model.query_class = self.Query - model.query = _QueryProperty() # type: ignore[assignment] - model.__fsa__ = self - return model - - def _apply_driver_defaults(self, options: dict[str, t.Any], app: Flask) -> None: - """Apply driver-specific configuration to an engine. - - SQLite in-memory databases use ``StaticPool`` and disable ``check_same_thread``. - File paths are relative to the app's :attr:`~flask.Flask.instance_path`, - which is created if it doesn't exist. - - MySQL sets ``charset="utf8mb4"``, and ``pool_timeout`` defaults to 2 hours. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param options: Arguments passed to the engine. - :param app: The application that the engine configuration belongs to. - - .. versionchanged:: 3.0 - SQLite paths are relative to ``app.instance_path``. It does not use - ``NullPool`` if ``pool_size`` is 0. Driver-level URIs are supported. - - .. versionchanged:: 3.0 - MySQL sets ``charset="utf8mb4". It does not set ``pool_size`` to 10. It - does not set ``pool_recycle`` if not using a queue pool. - - .. versionchanged:: 3.0 - Renamed from ``apply_driver_hacks``, this method is internal. It does not - return anything. - - .. versionchanged:: 2.5 - Returns ``(sa_url, options)``. 
- """ - url = sa.engine.make_url(options["url"]) - - if url.drivername in {"sqlite", "sqlite+pysqlite"}: - if url.database is None or url.database in {"", ":memory:"}: - options["poolclass"] = sa.pool.StaticPool - - if "connect_args" not in options: - options["connect_args"] = {} - - options["connect_args"]["check_same_thread"] = False - else: - # the url might look like sqlite:///file:path?uri=true - is_uri = url.query.get("uri", False) - - if is_uri: - db_str = url.database[5:] - else: - db_str = url.database - - if not os.path.isabs(db_str): - os.makedirs(app.instance_path, exist_ok=True) - db_str = os.path.join(app.instance_path, db_str) - - if is_uri: - db_str = f"file:{db_str}" - - options["url"] = url.set(database=db_str) - elif url.drivername.startswith("mysql"): - # set queue defaults only when using queue pool - if ( - "pool_class" not in options - or options["pool_class"] is sa.pool.QueuePool - ): - options.setdefault("pool_recycle", 7200) - - if "charset" not in url.query: - options["url"] = url.update_query_dict({"charset": "utf8mb4"}) - - def _make_engine( - self, bind_key: str | None, options: dict[str, t.Any], app: Flask - ) -> sa.engine.Engine: - """Create the :class:`sqlalchemy.engine.Engine` for the given bind key and app. - - To customize, use :data:`.SQLALCHEMY_ENGINE_OPTIONS` or - :data:`.SQLALCHEMY_BINDS` config. Pass ``engine_options`` to :class:`SQLAlchemy` - to set defaults for all engines. - - This method is used for internal setup. Its signature may change at any time. - - :meta private: - - :param bind_key: The name of the engine being created. - :param options: Arguments passed to the engine. - :param app: The application that the engine configuration belongs to. - - .. versionchanged:: 3.0 - Renamed from ``create_engine``, this method is internal. - """ - return sa.engine_from_config(options, prefix="") - - @property - def metadata(self) -> sa.MetaData: - """The default metadata used by :attr:`Model` and :attr:`Table` if no bind key - is set. - """ - return self.metadatas[None] - - @property - def engines(self) -> t.Mapping[str | None, sa.engine.Engine]: - """Map of bind keys to :class:`sqlalchemy.engine.Engine` instances for current - application. The ``None`` key refers to the default engine, and is available as - :attr:`engine`. - - To customize, set the :data:`.SQLALCHEMY_BINDS` config, and set defaults by - passing the ``engine_options`` parameter to the extension. - - This requires that a Flask application context is active. - - .. versionadded:: 3.0 - """ - app = current_app._get_current_object() # type: ignore[attr-defined] - - if app not in self._app_engines: - raise RuntimeError( - "The current Flask app is not registered with this 'SQLAlchemy'" - " instance. Did you forget to call 'init_app', or did you create" - " multiple 'SQLAlchemy' instances?" - ) - - return self._app_engines[app] - - @property - def engine(self) -> sa.engine.Engine: - """The default :class:`~sqlalchemy.engine.Engine` for the current application, - used by :attr:`session` if the :attr:`Model` or :attr:`Table` being queried does - not set a bind key. - - To customize, set the :data:`.SQLALCHEMY_ENGINE_OPTIONS` config, and set - defaults by passing the ``engine_options`` parameter to the extension. - - This requires that a Flask application context is active. - """ - return self.engines[None] - - def get_engine( - self, bind_key: str | None = None, **kwargs: t.Any - ) -> sa.engine.Engine: - """Get the engine for the given bind key for the current application. 
- This requires that a Flask application context is active. - - :param bind_key: The name of the engine. - - .. deprecated:: 3.0 - Will be removed in Flask-SQLAlchemy 3.2. Use ``engines[key]`` instead. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - """ - warnings.warn( - "'get_engine' is deprecated and will be removed in Flask-SQLAlchemy" - " 3.2. Use 'engine' or 'engines[key]' instead. If you're using" - " Flask-Migrate or Alembic, you'll need to update your 'env.py' file.", - DeprecationWarning, - stacklevel=2, - ) - - if "bind" in kwargs: - bind_key = kwargs.pop("bind") - - return self.engines[bind_key] - - def get_or_404( - self, - entity: type[_O], - ident: t.Any, - *, - description: str | None = None, - **kwargs: t.Any, - ) -> _O: - """Like :meth:`session.get() <sqlalchemy.orm.Session.get>` but aborts with a - ``404 Not Found`` error instead of returning ``None``. - - :param entity: The model class to query. - :param ident: The primary key to query. - :param description: A custom message to show on the error page. - :param kwargs: Extra arguments passed to ``session.get()``. - - .. versionchanged:: 3.1 - Pass extra keyword arguments to ``session.get()``. - - .. versionadded:: 3.0 - """ - value = self.session.get(entity, ident, **kwargs) - - if value is None: - abort(404, description=description) - - return value - - def first_or_404( - self, statement: sa.sql.Select[t.Any], *, description: str | None = None - ) -> t.Any: - """Like :meth:`Result.scalar() <sqlalchemy.engine.Result.scalar>`, but aborts - with a ``404 Not Found`` error instead of returning ``None``. - - :param statement: The ``select`` statement to execute. - :param description: A custom message to show on the error page. - - .. versionadded:: 3.0 - """ - value = self.session.execute(statement).scalar() - - if value is None: - abort(404, description=description) - - return value - - def one_or_404( - self, statement: sa.sql.Select[t.Any], *, description: str | None = None - ) -> t.Any: - """Like :meth:`Result.scalar_one() <sqlalchemy.engine.Result.scalar_one>`, - but aborts with a ``404 Not Found`` error instead of raising ``NoResultFound`` - or ``MultipleResultsFound``. - - :param statement: The ``select`` statement to execute. - :param description: A custom message to show on the error page. - - .. versionadded:: 3.0 - """ - try: - return self.session.execute(statement).scalar_one() - except (sa_exc.NoResultFound, sa_exc.MultipleResultsFound): - abort(404, description=description) - - def paginate( - self, - select: sa.sql.Select[t.Any], - *, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = None, - error_out: bool = True, - count: bool = True, - ) -> Pagination: - """Apply an offset and limit to a select statement based on the current page and - number of items per page, returning a :class:`.Pagination` object. - - The statement should select a model class, like ``select(User)``. This applies - ``unique()`` and ``scalars()`` modifiers to the result, so compound selects will - not return the expected results. - - :param select: The ``select`` statement to paginate. - :param page: The current page, used to calculate the offset. Defaults to the - ``page`` query arg during a request, or 1 otherwise. - :param per_page: The maximum number of items on a page, used to calculate the - offset and limit. Defaults to the ``per_page`` query arg during a request, - or 20 otherwise. - :param max_per_page: The maximum allowed value for ``per_page``, to limit a - user-provided value. Use ``None`` for no limit. Defaults to 100.
- :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - :param count: Calculate the total number of values by issuing an extra count - query. For very complex queries this may be inaccurate or slow, so it can be - disabled and set manually if necessary. - - .. versionchanged:: 3.0 - The ``count`` query is more efficient. - - .. versionadded:: 3.0 - """ - return SelectPagination( - select=select, - session=self.session(), - page=page, - per_page=per_page, - max_per_page=max_per_page, - error_out=error_out, - count=count, - ) - - def _call_for_binds( - self, bind_key: str | None | list[str | None], op_name: str - ) -> None: - """Call a method on each metadata. - - :meta private: - - :param bind_key: A bind key or list of keys. Defaults to all binds. - :param op_name: The name of the method to call. - - .. versionchanged:: 3.0 - Renamed from ``_execute_for_all_tables``. - """ - if bind_key == "__all__": - keys: list[str | None] = list(self.metadatas) - elif bind_key is None or isinstance(bind_key, str): - keys = [bind_key] - else: - keys = bind_key - - for key in keys: - try: - engine = self.engines[key] - except KeyError: - message = f"Bind key '{key}' is not in 'SQLALCHEMY_BINDS' config." - - if key is None: - message = f"'SQLALCHEMY_DATABASE_URI' config is not set. {message}" - - raise sa_exc.UnboundExecutionError(message) from None - - metadata = self.metadatas[key] - getattr(metadata, op_name)(bind=engine) - - def create_all(self, bind_key: str | None | list[str | None] = "__all__") -> None: - """Create tables that do not exist in the database by calling - ``metadata.create_all()`` for all or some bind keys. This does not - update existing tables; use a migration library for that. - - This requires that a Flask application context is active. - - :param bind_key: A bind key or list of keys to create the tables for. Defaults - to all binds. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - - .. versionchanged:: 0.12 - Added the ``bind`` and ``app`` parameters. - """ - self._call_for_binds(bind_key, "create_all") - - def drop_all(self, bind_key: str | None | list[str | None] = "__all__") -> None: - """Drop tables by calling ``metadata.drop_all()`` for all or some bind keys. - - This requires that a Flask application context is active. - - :param bind_key: A bind key or list of keys to drop the tables from. Defaults to - all binds. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - - .. versionchanged:: 0.12 - Added the ``bind`` and ``app`` parameters. - """ - self._call_for_binds(bind_key, "drop_all") - - def reflect(self, bind_key: str | None | list[str | None] = "__all__") -> None: - """Load table definitions from the database by calling ``metadata.reflect()`` - for all or some bind keys. - - This requires that a Flask application context is active. - - :param bind_key: A bind key or list of keys to reflect the tables from. Defaults - to all binds. - - .. versionchanged:: 3.0 - Renamed the ``bind`` parameter to ``bind_key``. Removed the ``app`` - parameter. - - .. versionchanged:: 0.12 - Added the ``bind`` and ``app`` parameters. - """ - self._call_for_binds(bind_key, "reflect") - - def _set_rel_query(self, kwargs: dict[str, t.Any]) -> None: - """Apply the extension's :attr:`Query` class as the default for relationships - and backrefs.
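Editor's sketch (hypothetical routes, continuing the app above): the 404 helpers and db.paginate() defined above as they would appear in views; page and per_page fall back to the request query string.

import sqlalchemy as sa

@app.route("/users/<int:user_id>")
def show_user(user_id):
    # A missing primary key becomes a 404 instead of None.
    user = db.get_or_404(User, user_id, description="No such user.")
    return {"id": user.id, "name": user.name}

@app.route("/users")
def list_users():
    pager = db.paginate(sa.select(User).order_by(User.id), per_page=10)
    return {
        "users": [u.name for u in pager.items],
        "total": pager.total,  # None when called with count=False
        "pages": pager.pages,
    }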
- - :meta private: - """ - kwargs.setdefault("query_class", self.Query) - - if "backref" in kwargs: - backref = kwargs["backref"] - - if isinstance(backref, str): - backref = (backref, {}) - - backref[1].setdefault("query_class", self.Query) - - def relationship( - self, *args: t.Any, **kwargs: t.Any - ) -> sa_orm.RelationshipProperty[t.Any]: - """A :func:`sqlalchemy.orm.relationship` that applies this extension's - :attr:`Query` class for dynamic relationships and backrefs. - - .. versionchanged:: 3.0 - The :attr:`Query` class is set on ``backref``. - """ - self._set_rel_query(kwargs) - return sa_orm.relationship(*args, **kwargs) - - def dynamic_loader( - self, argument: t.Any, **kwargs: t.Any - ) -> sa_orm.RelationshipProperty[t.Any]: - """A :func:`sqlalchemy.orm.dynamic_loader` that applies this extension's - :attr:`Query` class for relationships and backrefs. - - .. versionchanged:: 3.0 - The :attr:`Query` class is set on ``backref``. - """ - self._set_rel_query(kwargs) - return sa_orm.dynamic_loader(argument, **kwargs) - - def _relation( - self, *args: t.Any, **kwargs: t.Any - ) -> sa_orm.RelationshipProperty[t.Any]: - """A :func:`sqlalchemy.orm.relationship` that applies this extension's - :attr:`Query` class for dynamic relationships and backrefs. - - SQLAlchemy 2.0 removes this name, use ``relationship`` instead. - - :meta private: - - .. versionchanged:: 3.0 - The :attr:`Query` class is set on ``backref``. - """ - self._set_rel_query(kwargs) - f = sa_orm.relationship - return f(*args, **kwargs) - - def __getattr__(self, name: str) -> t.Any: - if name == "relation": - return self._relation - - if name == "event": - return sa_event - - if name.startswith("_"): - raise AttributeError(name) - - for mod in (sa, sa_orm): - if hasattr(mod, name): - return getattr(mod, name) - - raise AttributeError(name) diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/model.py b/myenv/Lib/site-packages/flask_sqlalchemy/model.py deleted file mode 100644 index c6f9e5a..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/model.py +++ /dev/null @@ -1,330 +0,0 @@ -from __future__ import annotations - -import re -import typing as t - -import sqlalchemy as sa -import sqlalchemy.orm as sa_orm - -from .query import Query - -if t.TYPE_CHECKING: - from .extension import SQLAlchemy - - -class _QueryProperty: - """A class property that creates a query object for a model. - - :meta private: - """ - - def __get__(self, obj: Model | None, cls: type[Model]) -> Query: - return cls.query_class( - cls, session=cls.__fsa__.session() # type: ignore[arg-type] - ) - - -class Model: - """The base class of the :attr:`.SQLAlchemy.Model` declarative model class. - - To define models, subclass :attr:`db.Model <.SQLAlchemy.Model>`, not this. To - customize ``db.Model``, subclass this and pass it as ``model_class`` to - :class:`.SQLAlchemy`. To customize ``db.Model`` at the metaclass level, pass an - already created declarative model class as ``model_class``. - """ - - __fsa__: t.ClassVar[SQLAlchemy] - """Internal reference to the extension object. - - :meta private: - """ - - query_class: t.ClassVar[type[Query]] = Query - """Query class used by :attr:`query`. Defaults to :attr:`.SQLAlchemy.Query`, which - defaults to :class:`.Query`. - """ - - query: t.ClassVar[Query] = _QueryProperty() # type: ignore[assignment] - """A SQLAlchemy query for a model. Equivalent to ``db.session.query(Model)``. Can be - customized per-model by overriding :attr:`query_class`. - - .. 
warning:: - The query interface is considered legacy in SQLAlchemy. Prefer using - ``session.execute(select())`` instead. - """ - - def __repr__(self) -> str: - state = sa.inspect(self) - assert state is not None - - if state.transient: - pk = f"(transient {id(self)})" - elif state.pending: - pk = f"(pending {id(self)})" - else: - pk = ", ".join(map(str, state.identity)) - - return f"<{type(self).__name__} {pk}>" - - -class BindMetaMixin(type): - """Metaclass mixin that sets a model's ``metadata`` based on its ``__bind_key__``. - - If the model sets ``metadata`` or ``__table__`` directly, ``__bind_key__`` is - ignored. If the ``metadata`` is the same as the parent model, it will not be set - directly on the child model. - """ - - __fsa__: SQLAlchemy - metadata: sa.MetaData - - def __init__( - cls, name: str, bases: tuple[type, ...], d: dict[str, t.Any], **kwargs: t.Any - ) -> None: - if not ("metadata" in cls.__dict__ or "__table__" in cls.__dict__): - bind_key = getattr(cls, "__bind_key__", None) - parent_metadata = getattr(cls, "metadata", None) - metadata = cls.__fsa__._make_metadata(bind_key) - - if metadata is not parent_metadata: - cls.metadata = metadata - - super().__init__(name, bases, d, **kwargs) - - -class BindMixin: - """DeclarativeBase mixin to set a model's ``metadata`` based on ``__bind_key__``. - - If no ``__bind_key__`` is specified, the model will use the default metadata - provided by ``DeclarativeBase`` or ``DeclarativeBaseNoMeta``. - If the model doesn't set ``metadata`` or ``__table__`` directly - and does set ``__bind_key__``, the model will use the metadata - for the specified bind key. - If the ``metadata`` is the same as the parent model, it will not be set - directly on the child model. - - .. versionchanged:: 3.1.0 - """ - - __fsa__: SQLAlchemy - metadata: sa.MetaData - - @classmethod - def __init_subclass__(cls: t.Type[BindMixin], **kwargs: t.Dict[str, t.Any]) -> None: - if not ("metadata" in cls.__dict__ or "__table__" in cls.__dict__) and hasattr( - cls, "__bind_key__" - ): - bind_key = getattr(cls, "__bind_key__", None) - parent_metadata = getattr(cls, "metadata", None) - metadata = cls.__fsa__._make_metadata(bind_key) - - if metadata is not parent_metadata: - cls.metadata = metadata - - super().__init_subclass__(**kwargs) - - -class NameMetaMixin(type): - """Metaclass mixin that sets a model's ``__tablename__`` by converting the - ``CamelCase`` class name to ``snake_case``. A name is set for non-abstract models - that do not otherwise define ``__tablename__``. If a model does not define a primary - key, it will not generate a name or ``__table__``, for single-table inheritance. - """ - - metadata: sa.MetaData - __tablename__: str - __table__: sa.Table - - def __init__( - cls, name: str, bases: tuple[type, ...], d: dict[str, t.Any], **kwargs: t.Any - ) -> None: - if should_set_tablename(cls): - cls.__tablename__ = camel_to_snake_case(cls.__name__) - - super().__init__(name, bases, d, **kwargs) - - # __table_cls__ has run. If no table was created, use the parent table. - if ( - "__tablename__" not in cls.__dict__ - and "__table__" in cls.__dict__ - and cls.__dict__["__table__"] is None - ): - del cls.__table__ - - def __table_cls__(cls, *args: t.Any, **kwargs: t.Any) -> sa.Table | None: - """This is called by SQLAlchemy during mapper setup. It determines the final - table object that the model will use. - - If no primary key is found, that indicates single-table inheritance, so no table - will be created and ``__tablename__`` will be unset. 
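Editor's sketch (hypothetical model, continuing the app above): what the Bind*/Name* mixins described above do to a subclass. Assumes the config also contained, before db.init_app(app) ran: app.config["SQLALCHEMY_BINDS"] = {"analytics": "sqlite:///analytics.db"}.

class PageView(db.Model):
    __bind_key__ = "analytics"  # BindMixin/BindMetaMixin: use the analytics metadata and engine
    id = db.Column(db.Integer, primary_key=True)

assert PageView.__tablename__ == "page_view"           # CamelCase -> snake_case
assert PageView.metadata is db.metadatas["analytics"]  # per-bind MetaData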
- """ - schema = kwargs.get("schema") - - if schema is None: - key = args[0] - else: - key = f"{schema}.{args[0]}" - - # Check if a table with this name already exists. Allows reflected tables to be - # applied to models by name. - if key in cls.metadata.tables: - return sa.Table(*args, **kwargs) - - # If a primary key is found, create a table for joined-table inheritance. - for arg in args: - if (isinstance(arg, sa.Column) and arg.primary_key) or isinstance( - arg, sa.PrimaryKeyConstraint - ): - return sa.Table(*args, **kwargs) - - # If no base classes define a table, return one that's missing a primary key - # so SQLAlchemy shows the correct error. - for base in cls.__mro__[1:-1]: - if "__table__" in base.__dict__: - break - else: - return sa.Table(*args, **kwargs) - - # Single-table inheritance, use the parent table name. __init__ will unset - # __table__ based on this. - if "__tablename__" in cls.__dict__: - del cls.__tablename__ - - return None - - -class NameMixin: - """DeclarativeBase mixin that sets a model's ``__tablename__`` by converting the - ``CamelCase`` class name to ``snake_case``. A name is set for non-abstract models - that do not otherwise define ``__tablename__``. If a model does not define a primary - key, it will not generate a name or ``__table__``, for single-table inheritance. - - .. versionchanged:: 3.1.0 - """ - - metadata: sa.MetaData - __tablename__: str - __table__: sa.Table - - @classmethod - def __init_subclass__(cls: t.Type[NameMixin], **kwargs: t.Dict[str, t.Any]) -> None: - if should_set_tablename(cls): - cls.__tablename__ = camel_to_snake_case(cls.__name__) - - super().__init_subclass__(**kwargs) - - # __table_cls__ has run. If no table was created, use the parent table. - if ( - "__tablename__" not in cls.__dict__ - and "__table__" in cls.__dict__ - and cls.__dict__["__table__"] is None - ): - del cls.__table__ - - @classmethod - def __table_cls__(cls, *args: t.Any, **kwargs: t.Any) -> sa.Table | None: - """This is called by SQLAlchemy during mapper setup. It determines the final - table object that the model will use. - - If no primary key is found, that indicates single-table inheritance, so no table - will be created and ``__tablename__`` will be unset. - """ - schema = kwargs.get("schema") - - if schema is None: - key = args[0] - else: - key = f"{schema}.{args[0]}" - - # Check if a table with this name already exists. Allows reflected tables to be - # applied to models by name. - if key in cls.metadata.tables: - return sa.Table(*args, **kwargs) - - # If a primary key is found, create a table for joined-table inheritance. - for arg in args: - if (isinstance(arg, sa.Column) and arg.primary_key) or isinstance( - arg, sa.PrimaryKeyConstraint - ): - return sa.Table(*args, **kwargs) - - # If no base classes define a table, return one that's missing a primary key - # so SQLAlchemy shows the correct error. - for base in cls.__mro__[1:-1]: - if "__table__" in base.__dict__: - break - else: - return sa.Table(*args, **kwargs) - - # Single-table inheritance, use the parent table name. __init__ will unset - # __table__ based on this. - if "__tablename__" in cls.__dict__: - del cls.__tablename__ - - return None - - -def should_set_tablename(cls: type) -> bool: - """Determine whether ``__tablename__`` should be generated for a model. - - - If no class in the MRO sets a name, one should be generated. - - If a declared attr is found, it should be used instead. - - If a name is found, it should be used if the class is a mixin, otherwise one - should be generated. 
- - Abstract models should not have one generated. - - Later, ``__table_cls__`` will determine if the model looks like single or - joined-table inheritance. If no primary key is found, the name will be unset. - """ - if ( - cls.__dict__.get("__abstract__", False) - or ( - not issubclass(cls, (sa_orm.DeclarativeBase, sa_orm.DeclarativeBaseNoMeta)) - and not any(isinstance(b, sa_orm.DeclarativeMeta) for b in cls.__mro__[1:]) - ) - or any( - (b is sa_orm.DeclarativeBase or b is sa_orm.DeclarativeBaseNoMeta) - for b in cls.__bases__ - ) - ): - return False - - for base in cls.__mro__: - if "__tablename__" not in base.__dict__: - continue - - if isinstance(base.__dict__["__tablename__"], sa_orm.declared_attr): - return False - - return not ( - base is cls - or base.__dict__.get("__abstract__", False) - or not ( - # SQLAlchemy 1.x - isinstance(base, sa_orm.DeclarativeMeta) - # 2.x: DeclarativeBase uses this as metaclass - or isinstance(base, sa_orm.decl_api.DeclarativeAttributeIntercept) - # 2.x: DeclarativeBaseNoMeta doesn't use a metaclass - or issubclass(base, sa_orm.DeclarativeBaseNoMeta) - ) - ) - - return True - - def camel_to_snake_case(name: str) -> str: - """Convert a ``CamelCase`` name to ``snake_case``.""" - name = re.sub(r"((?<=[a-z0-9])[A-Z]|(?!^)[A-Z](?=[a-z]))", r"_\1", name) - return name.lower().lstrip("_") - - class DefaultMeta(BindMetaMixin, NameMetaMixin, sa_orm.DeclarativeMeta): - """SQLAlchemy declarative metaclass that provides ``__bind_key__`` and - ``__tablename__`` support. - """ - - class DefaultMetaNoName(BindMetaMixin, sa_orm.DeclarativeMeta): - """SQLAlchemy declarative metaclass that provides ``__bind_key__`` and - ``__tablename__`` support. - """ diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/pagination.py b/myenv/Lib/site-packages/flask_sqlalchemy/pagination.py deleted file mode 100644 index 3d49d6e..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/pagination.py +++ /dev/null @@ -1,364 +0,0 @@ -from __future__ import annotations - -import typing as t -from math import ceil - -import sqlalchemy as sa -import sqlalchemy.orm as sa_orm -from flask import abort -from flask import request - - -class Pagination: - """Apply an offset and limit to the query based on the current page and number of - items per page. - - Don't create pagination objects manually. They are created by - :meth:`.SQLAlchemy.paginate` and :meth:`.Query.paginate`. - - This is a base class; a subclass must implement :meth:`_query_items` and - :meth:`_query_count`. Those methods will use arguments passed as ``kwargs`` to - perform the queries. - - :param page: The current page, used to calculate the offset. Defaults to the - ``page`` query arg during a request, or 1 otherwise. - :param per_page: The maximum number of items on a page, used to calculate the - offset and limit. Defaults to the ``per_page`` query arg during a request, - or 20 otherwise. - :param max_per_page: The maximum allowed value for ``per_page``, to limit a - user-provided value. Use ``None`` for no limit. Defaults to 100. - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - :param count: Calculate the total number of values by issuing an extra count - query. For very complex queries this may be inaccurate or slow, so it can be - disabled and set manually if necessary. - :param kwargs: Information about the query to paginate. Different subclasses will - require different arguments.
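Editor's sketch (not part of the deleted sources): expected behavior of camel_to_snake_case above. Runs of capitals stay together, and an uppercase letter after a digit starts a new word.

from flask_sqlalchemy.model import camel_to_snake_case

assert camel_to_snake_case("UserAccount") == "user_account"
assert camel_to_snake_case("HTTPRequestLog") == "http_request_log"
assert camel_to_snake_case("OAuth2Token") == "o_auth2_token"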
- - .. versionchanged:: 3.0 - Iterating over a pagination object iterates over its items. - - .. versionchanged:: 3.0 - Creating instances manually is not a public API. - """ - - def __init__( - self, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = 100, - error_out: bool = True, - count: bool = True, - **kwargs: t.Any, - ) -> None: - self._query_args = kwargs - page, per_page = self._prepare_page_args( - page=page, - per_page=per_page, - max_per_page=max_per_page, - error_out=error_out, - ) - - self.page: int = page - """The current page.""" - - self.per_page: int = per_page - """The maximum number of items on a page.""" - - self.max_per_page: int | None = max_per_page - """The maximum allowed value for ``per_page``.""" - - items = self._query_items() - - if not items and page != 1 and error_out: - abort(404) - - self.items: list[t.Any] = items - """The items on the current page. Iterating over the pagination object is - equivalent to iterating over the items. - """ - - if count: - total = self._query_count() - else: - total = None - - self.total: int | None = total - """The total number of items across all pages.""" - - @staticmethod - def _prepare_page_args( - *, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = None, - error_out: bool = True, - ) -> tuple[int, int]: - if request: - if page is None: - try: - page = int(request.args.get("page", 1)) - except (TypeError, ValueError): - if error_out: - abort(404) - - page = 1 - - if per_page is None: - try: - per_page = int(request.args.get("per_page", 20)) - except (TypeError, ValueError): - if error_out: - abort(404) - - per_page = 20 - else: - if page is None: - page = 1 - - if per_page is None: - per_page = 20 - - if max_per_page is not None: - per_page = min(per_page, max_per_page) - - if page < 1: - if error_out: - abort(404) - else: - page = 1 - - if per_page < 1: - if error_out: - abort(404) - else: - per_page = 20 - - return page, per_page - - @property - def _query_offset(self) -> int: - """The index of the first item to query, passed to ``offset()``. - - :meta private: - - .. versionadded:: 3.0 - """ - return (self.page - 1) * self.per_page - - def _query_items(self) -> list[t.Any]: - """Execute the query to get the items on the current page. - - Uses init arguments stored in :attr:`_query_args`. - - :meta private: - - .. versionadded:: 3.0 - """ - raise NotImplementedError - - def _query_count(self) -> int: - """Execute the query to get the total number of items. - - Uses init arguments stored in :attr:`_query_args`. - - :meta private: - - .. versionadded:: 3.0 - """ - raise NotImplementedError - - @property - def first(self) -> int: - """The number of the first item on the page, starting from 1, or 0 if there are - no items. - - .. versionadded:: 3.0 - """ - if len(self.items) == 0: - return 0 - - return (self.page - 1) * self.per_page + 1 - - @property - def last(self) -> int: - """The number of the last item on the page, starting from 1, inclusive, or 0 if - there are no items. - - .. 
versionadded:: 3.0 - """ - first = self.first - return max(first, first + len(self.items) - 1) - - @property - def pages(self) -> int: - """The total number of pages.""" - if self.total == 0 or self.total is None: - return 0 - - return ceil(self.total / self.per_page) - - @property - def has_prev(self) -> bool: - """``True`` if this is not the first page.""" - return self.page > 1 - - @property - def prev_num(self) -> int | None: - """The previous page number, or ``None`` if this is the first page.""" - if not self.has_prev: - return None - - return self.page - 1 - - def prev(self, *, error_out: bool = False) -> Pagination: - """Query the :class:`Pagination` object for the previous page. - - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - """ - p = type(self)( - page=self.page - 1, - per_page=self.per_page, - error_out=error_out, - count=False, - **self._query_args, - ) - p.total = self.total - return p - - @property - def has_next(self) -> bool: - """``True`` if this is not the last page.""" - return self.page < self.pages - - @property - def next_num(self) -> int | None: - """The next page number, or ``None`` if this is the last page.""" - if not self.has_next: - return None - - return self.page + 1 - - def next(self, *, error_out: bool = False) -> Pagination: - """Query the :class:`Pagination` object for the next page. - - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - """ - p = type(self)( - page=self.page + 1, - per_page=self.per_page, - max_per_page=self.max_per_page, - error_out=error_out, - count=False, - **self._query_args, - ) - p.total = self.total - return p - - def iter_pages( - self, - *, - left_edge: int = 2, - left_current: int = 2, - right_current: int = 4, - right_edge: int = 2, - ) -> t.Iterator[int | None]: - """Yield page numbers for a pagination widget. Skipped pages between the edges - and middle are represented by a ``None``. - - For example, if there are 20 pages and the current page is 7, the following - values are yielded. - - .. code-block:: python - - 1, 2, None, 5, 6, 7, 8, 9, 10, 11, None, 19, 20 - - :param left_edge: How many pages to show from the first page. - :param left_current: How many pages to show left of the current page. - :param right_current: How many pages to show right of the current page. - :param right_edge: How many pages to show from the last page. - - .. versionchanged:: 3.0 - Improved efficiency of calculating what to yield. - - .. versionchanged:: 3.0 - ``right_current`` boundary is inclusive. - - .. versionchanged:: 3.0 - All parameters are keyword-only. - """ - pages_end = self.pages + 1 - - if pages_end == 1: - return - - left_end = min(1 + left_edge, pages_end) - yield from range(1, left_end) - - if left_end == pages_end: - return - - mid_start = max(left_end, self.page - left_current) - mid_end = min(self.page + right_current + 1, pages_end) - - if mid_start - left_end > 0: - yield None - - yield from range(mid_start, mid_end) - - if mid_end == pages_end: - return - - right_start = max(mid_end, pages_end - right_edge) - - if right_start - mid_end > 0: - yield None - - yield from range(right_start, pages_end) - - def __iter__(self) -> t.Iterator[t.Any]: - yield from self.items - - -class SelectPagination(Pagination): - """Returned by :meth:`.SQLAlchemy.paginate`. 
Takes ``select`` and ``session`` - arguments in addition to the :class:`Pagination` arguments. - - .. versionadded:: 3.0 - """ - - def _query_items(self) -> list[t.Any]: - select = self._query_args["select"] - select = select.limit(self.per_page).offset(self._query_offset) - session = self._query_args["session"] - return list(session.execute(select).unique().scalars()) - - def _query_count(self) -> int: - select = self._query_args["select"] - sub = select.options(sa_orm.lazyload("*")).order_by(None).subquery() - session = self._query_args["session"] - out = session.execute(sa.select(sa.func.count()).select_from(sub)).scalar() - return out # type: ignore[no-any-return] - - -class QueryPagination(Pagination): - """Returned by :meth:`.Query.paginate`. Takes a ``query`` argument in addition to - the :class:`Pagination` arguments. - - .. versionadded:: 3.0 - """ - - def _query_items(self) -> list[t.Any]: - query = self._query_args["query"] - out = query.limit(self.per_page).offset(self._query_offset).all() - return out # type: ignore[no-any-return] - - def _query_count(self) -> int: - # Query.count automatically disables eager loads - out = self._query_args["query"].order_by(None).count() - return out # type: ignore[no-any-return] diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/py.typed b/myenv/Lib/site-packages/flask_sqlalchemy/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/query.py b/myenv/Lib/site-packages/flask_sqlalchemy/query.py deleted file mode 100644 index 35f927d..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/query.py +++ /dev/null @@ -1,105 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy.exc as sa_exc -import sqlalchemy.orm as sa_orm -from flask import abort - -from .pagination import Pagination -from .pagination import QueryPagination - - -class Query(sa_orm.Query): # type: ignore[type-arg] - """SQLAlchemy :class:`~sqlalchemy.orm.query.Query` subclass with some extra methods - useful for querying in a web application. - - This is the default query class for :attr:`.Model.query`. - - .. versionchanged:: 3.0 - Renamed to ``Query`` from ``BaseQuery``. - """ - - def get_or_404(self, ident: t.Any, description: str | None = None) -> t.Any: - """Like :meth:`~sqlalchemy.orm.Query.get` but aborts with a ``404 Not Found`` - error instead of returning ``None``. - - :param ident: The primary key to query. - :param description: A custom message to show on the error page. - """ - rv = self.get(ident) - - if rv is None: - abort(404, description=description) - - return rv - - def first_or_404(self, description: str | None = None) -> t.Any: - """Like :meth:`~sqlalchemy.orm.Query.first` but aborts with a ``404 Not Found`` - error instead of returning ``None``. - - :param description: A custom message to show on the error page. - """ - rv = self.first() - - if rv is None: - abort(404, description=description) - - return rv - - def one_or_404(self, description: str | None = None) -> t.Any: - """Like :meth:`~sqlalchemy.orm.Query.one` but aborts with a ``404 Not Found`` - error instead of raising ``NoResultFound`` or ``MultipleResultsFound``. - - :param description: A custom message to show on the error page. - - .. 
versionadded:: 3.0 - """ - try: - return self.one() - except (sa_exc.NoResultFound, sa_exc.MultipleResultsFound): - abort(404, description=description) - - def paginate( - self, - *, - page: int | None = None, - per_page: int | None = None, - max_per_page: int | None = None, - error_out: bool = True, - count: bool = True, - ) -> Pagination: - """Apply an offset and limit to the query based on the current page and number - of items per page, returning a :class:`.Pagination` object. - - :param page: The current page, used to calculate the offset. Defaults to the - ``page`` query arg during a request, or 1 otherwise. - :param per_page: The maximum number of items on a page, used to calculate the - offset and limit. Defaults to the ``per_page`` query arg during a request, - or 20 otherwise. - :param max_per_page: The maximum allowed value for ``per_page``, to limit a - user-provided value. Use ``None`` for no limit. Defaults to 100. - :param error_out: Abort with a ``404 Not Found`` error if no items are returned - and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if - either are not ints. - :param count: Calculate the total number of values by issuing an extra count - query. For very complex queries this may be inaccurate or slow, so it can be - disabled and set manually if necessary. - - .. versionchanged:: 3.0 - All parameters are keyword-only. - - .. versionchanged:: 3.0 - The ``count`` query is more efficient. - - .. versionchanged:: 3.0 - ``max_per_page`` defaults to 100. - """ - return QueryPagination( - query=self, - page=page, - per_page=per_page, - max_per_page=max_per_page, - error_out=error_out, - count=count, - ) diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/record_queries.py b/myenv/Lib/site-packages/flask_sqlalchemy/record_queries.py deleted file mode 100644 index e8273be..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/record_queries.py +++ /dev/null @@ -1,117 +0,0 @@ -from __future__ import annotations - -import dataclasses -import inspect -import typing as t -from time import perf_counter - -import sqlalchemy as sa -import sqlalchemy.event as sa_event -from flask import current_app -from flask import g -from flask import has_app_context - - -def get_recorded_queries() -> list[_QueryInfo]: - """Get the list of recorded query information for the current session. Queries are - recorded if the config :data:`.SQLALCHEMY_RECORD_QUERIES` is enabled. - - Each query info object has the following attributes: - - ``statement`` - The string of SQL generated by SQLAlchemy with parameter placeholders. - ``parameters`` - The parameters sent with the SQL statement. - ``start_time`` / ``end_time`` - Timing info about when the query started execution and when the results were - returned. Accuracy and value depend on the operating system. - ``duration`` - The time the query took in seconds. - ``location`` - A string description of where in your application code the query was executed. - This may not be possible to calculate, and the format is not stable. - - .. versionchanged:: 3.0 - Renamed from ``get_debug_queries``. - - .. versionchanged:: 3.0 - The info object is a dataclass instead of a tuple. - - .. versionchanged:: 3.0 - The info object attribute ``context`` is renamed to ``location``. - - .. versionchanged:: 3.0 - Not enabled automatically in debug or testing mode. - """ - return g.get("_sqlalchemy_queries", []) # type: ignore[no-any-return] - - -@dataclasses.dataclass -class _QueryInfo: - """Information about an executed query.
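Editor's sketch (continuing the app above): reading the recorder. SQLALCHEMY_RECORD_QUERIES has to be truthy before db.init_app(app) so the engine listeners are attached.

import sqlalchemy as sa
from flask_sqlalchemy.record_queries import get_recorded_queries

# Set before db.init_app(app):
# app.config["SQLALCHEMY_RECORD_QUERIES"] = True

with app.app_context():
    db.session.execute(sa.select(User)).scalars().all()
    for info in get_recorded_queries():
        print(f"{info.duration:.6f}s {info.statement} ({info.location})")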
Returned by :func:`get_recorded_queries`. - - .. versionchanged:: 3.0 - Renamed from ``_DebugQueryTuple``. - - .. versionchanged:: 3.0 - Changed to a dataclass instead of a tuple. - - .. versionchanged:: 3.0 - ``context`` is renamed to ``location``. - """ - - statement: str | None - parameters: t.Any - start_time: float - end_time: float - location: str - - @property - def duration(self) -> float: - return self.end_time - self.start_time - - -def _listen(engine: sa.engine.Engine) -> None: - sa_event.listen(engine, "before_cursor_execute", _record_start, named=True) - sa_event.listen(engine, "after_cursor_execute", _record_end, named=True) - - -def _record_start(context: sa.engine.ExecutionContext, **kwargs: t.Any) -> None: - if not has_app_context(): - return - - context._fsa_start_time = perf_counter() # type: ignore[attr-defined] - - -def _record_end(context: sa.engine.ExecutionContext, **kwargs: t.Any) -> None: - if not has_app_context(): - return - - if "_sqlalchemy_queries" not in g: - g._sqlalchemy_queries = [] - - import_top = current_app.import_name.partition(".")[0] - import_dot = f"{import_top}." - frame = inspect.currentframe() - - while frame: - name = frame.f_globals.get("__name__") - - if name and (name == import_top or name.startswith(import_dot)): - code = frame.f_code - location = f"{code.co_filename}:{frame.f_lineno} ({code.co_name})" - break - - frame = frame.f_back - else: - location = "" - - g._sqlalchemy_queries.append( - _QueryInfo( - statement=context.statement, - parameters=context.parameters, - start_time=context._fsa_start_time, # type: ignore[attr-defined] - end_time=perf_counter(), - location=location, - ) - ) diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/session.py b/myenv/Lib/site-packages/flask_sqlalchemy/session.py deleted file mode 100644 index 631fffa..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/session.py +++ /dev/null @@ -1,111 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy as sa -import sqlalchemy.exc as sa_exc -import sqlalchemy.orm as sa_orm -from flask.globals import app_ctx - -if t.TYPE_CHECKING: - from .extension import SQLAlchemy - - -class Session(sa_orm.Session): - """A SQLAlchemy :class:`~sqlalchemy.orm.Session` class that chooses what engine to - use based on the bind key associated with the metadata associated with the thing - being queried. - - To customize ``db.session``, subclass this and pass it as the ``class_`` key in the - ``session_options`` to :class:`.SQLAlchemy`. - - .. versionchanged:: 3.0 - Renamed from ``SignallingSession``. - """ - - def __init__(self, db: SQLAlchemy, **kwargs: t.Any) -> None: - super().__init__(**kwargs) - self._db = db - self._model_changes: dict[object, tuple[t.Any, str]] = {} - - def get_bind( - self, - mapper: t.Any | None = None, - clause: t.Any | None = None, - bind: sa.engine.Engine | sa.engine.Connection | None = None, - **kwargs: t.Any, - ) -> sa.engine.Engine | sa.engine.Connection: - """Select an engine based on the ``bind_key`` of the metadata associated with - the model or table being queried. If no bind key is set, uses the default bind. - - .. versionchanged:: 3.0.3 - Fix finding the bind for a joined inheritance model. - - .. versionchanged:: 3.0 - The implementation more closely matches the base SQLAlchemy implementation. - - .. versionchanged:: 2.1 - Support joining an external transaction. 
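Editor's sketch (continuing the app and PageView sketches above): how Session.get_bind resolves engines. The mapped table's MetaData carries info["bind_key"], which selects the engine; models without a bind key fall through to the default.

with app.app_context():
    assert db.session.get_bind(mapper=User) is db.engine  # default (None) bind
    assert db.session.get_bind(mapper=PageView) is db.engines["analytics"]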
- """ - if bind is not None: - return bind - - engines = self._db.engines - - if mapper is not None: - try: - mapper = sa.inspect(mapper) - except sa_exc.NoInspectionAvailable as e: - if isinstance(mapper, type): - raise sa_orm.exc.UnmappedClassError(mapper) from e - - raise - - engine = _clause_to_engine(mapper.local_table, engines) - - if engine is not None: - return engine - - if clause is not None: - engine = _clause_to_engine(clause, engines) - - if engine is not None: - return engine - - if None in engines: - return engines[None] - - return super().get_bind(mapper=mapper, clause=clause, bind=bind, **kwargs) - - -def _clause_to_engine( - clause: sa.ClauseElement | None, - engines: t.Mapping[str | None, sa.engine.Engine], -) -> sa.engine.Engine | None: - """If the clause is a table, return the engine associated with the table's - metadata's bind key. - """ - table = None - - if clause is not None: - if isinstance(clause, sa.Table): - table = clause - elif isinstance(clause, sa.UpdateBase) and isinstance(clause.table, sa.Table): - table = clause.table - - if table is not None and "bind_key" in table.metadata.info: - key = table.metadata.info["bind_key"] - - if key not in engines: - raise sa_exc.UnboundExecutionError( - f"Bind key '{key}' is not in 'SQLALCHEMY_BINDS' config." - ) - - return engines[key] - - return None - - -def _app_ctx_id() -> int: - """Get the id of the current Flask application context for the session scope.""" - return id(app_ctx._get_current_object()) # type: ignore[attr-defined] diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/table.py b/myenv/Lib/site-packages/flask_sqlalchemy/table.py deleted file mode 100644 index ab08a69..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/table.py +++ /dev/null @@ -1,39 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy as sa -import sqlalchemy.sql.schema as sa_sql_schema - - -class _Table(sa.Table): - @t.overload - def __init__( - self, - name: str, - *args: sa_sql_schema.SchemaItem, - bind_key: str | None = None, - **kwargs: t.Any, - ) -> None: - ... - - @t.overload - def __init__( - self, - name: str, - metadata: sa.MetaData, - *args: sa_sql_schema.SchemaItem, - **kwargs: t.Any, - ) -> None: - ... - - @t.overload - def __init__( - self, name: str, *args: sa_sql_schema.SchemaItem, **kwargs: t.Any - ) -> None: - ... - - def __init__( - self, name: str, *args: sa_sql_schema.SchemaItem, **kwargs: t.Any - ) -> None: - super().__init__(name, *args, **kwargs) # type: ignore[arg-type] diff --git a/myenv/Lib/site-packages/flask_sqlalchemy/track_modifications.py b/myenv/Lib/site-packages/flask_sqlalchemy/track_modifications.py deleted file mode 100644 index 7028b65..0000000 --- a/myenv/Lib/site-packages/flask_sqlalchemy/track_modifications.py +++ /dev/null @@ -1,88 +0,0 @@ -from __future__ import annotations - -import typing as t - -import sqlalchemy as sa -import sqlalchemy.event as sa_event -import sqlalchemy.orm as sa_orm -from flask import current_app -from flask import has_app_context -from flask.signals import Namespace # type: ignore[attr-defined] - -if t.TYPE_CHECKING: - from .session import Session - -_signals = Namespace() - -models_committed = _signals.signal("models-committed") -"""This Blinker signal is sent after the session is committed if there were changed -models in the session. - -The sender is the application that emitted the changes. The receiver is passed the -``changes`` argument with a list of tuples in the form ``(instance, operation)``. 
-The operations are ``"insert"``, ``"update"``, and ``"delete"``.
-"""
-
-before_models_committed = _signals.signal("before-models-committed")
-"""This signal works exactly like :data:`models_committed` but is emitted before the
-commit takes place.
-"""
-
-
-def _listen(session: sa_orm.scoped_session[Session]) -> None:
-    sa_event.listen(session, "before_flush", _record_ops, named=True)
-    sa_event.listen(session, "before_commit", _record_ops, named=True)
-    sa_event.listen(session, "before_commit", _before_commit)
-    sa_event.listen(session, "after_commit", _after_commit)
-    sa_event.listen(session, "after_rollback", _after_rollback)
-
-
-def _record_ops(session: Session, **kwargs: t.Any) -> None:
-    if not has_app_context():
-        return
-
-    if not current_app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]:
-        return
-
-    for targets, operation in (
-        (session.new, "insert"),
-        (session.dirty, "update"),
-        (session.deleted, "delete"),
-    ):
-        for target in targets:
-            state = sa.inspect(target)
-            key = state.identity_key if state.has_identity else id(target)
-            session._model_changes[key] = (target, operation)
-
-
-def _before_commit(session: Session) -> None:
-    if not has_app_context():
-        return
-
-    app = current_app._get_current_object()  # type: ignore[attr-defined]
-
-    if not app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]:
-        return
-
-    if session._model_changes:
-        changes = list(session._model_changes.values())
-        before_models_committed.send(app, changes=changes)
-
-
-def _after_commit(session: Session) -> None:
-    if not has_app_context():
-        return
-
-    app = current_app._get_current_object()  # type: ignore[attr-defined]
-
-    if not app.config["SQLALCHEMY_TRACK_MODIFICATIONS"]:
-        return
-
-    if session._model_changes:
-        changes = list(session._model_changes.values())
-        models_committed.send(app, changes=changes)
-        session._model_changes.clear()
-
-
-def _after_rollback(session: Session) -> None:
-    session._model_changes.clear()
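Seen from application code, the deleted modules above cooperate: ``session.py`` routes each statement to the engine whose ``bind_key`` matches the queried table's metadata, and ``track_modifications.py`` emits the commit signals. A rough sketch, with an invented ``reports`` bind and ``Event`` model (illustrative only; ``SQLALCHEMY_TRACK_MODIFICATIONS`` must be explicitly enabled)::

    from flask import Flask
    from flask_sqlalchemy import SQLAlchemy
    from flask_sqlalchemy.track_modifications import models_committed

    app = Flask(__name__)
    app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite://"
    app.config["SQLALCHEMY_BINDS"] = {"reports": "sqlite://"}
    app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = True  # the signals are opt-in
    db = SQLAlchemy(app)

    class Event(db.Model):
        __bind_key__ = "reports"  # stored in metadata.info; read back by get_bind()
        id = db.Column(db.Integer, primary_key=True)

    @models_committed.connect_via(app)
    def on_commit(sender, changes):
        for instance, operation in changes:  # operation is "insert"/"update"/"delete"
            sender.logger.info("%s %r", operation, instance)

    with app.app_context():
        db.create_all()          # creates tables on the default and "reports" binds
        db.session.add(Event())  # get_bind() routes this to the "reports" engine
        db.session.commit()      # fires before_models_committed, then models_committed

The deleted ``table.py`` overloads serve the same routing purpose for plain tables created as ``db.Table("name", ..., bind_key="reports")``.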
diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/AUTHORS b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/AUTHORS
deleted file mode 100644
index 42a5c22..0000000
--- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/AUTHORS
+++ /dev/null
@@ -1,51 +0,0 @@
-Original Authors
-----------------
-* Armin Rigo
-* Christian Tismer
-
-Contributors
-------------
-* Al Stone
-* Alexander Schmidt
-* Alexey Borzenkov
-* Andreas Schwab
-* Armin Ronacher
-* Bin Wang
-* Bob Ippolito
-* ChangBo Guo
-* Christoph Gohlke
-* Denis Bilenko
-* Dirk Mueller
-* Donovan Preston
-* Fantix King
-* Floris Bruynooghe
-* Fredrik Fornwall
-* Gerd Woetzel
-* Giel van Schijndel
-* Gökhan Karabulut
-* Gustavo Niemeyer
-* Guy Rozendorn
-* Hye-Shik Chang
-* Jared Kuolt
-* Jason Madden
-* Josh Snyder
-* Kyle Ambroff
-* Laszlo Boszormenyi
-* Mao Han
-* Marc Abramowitz
-* Marc Schlaich
-* Marcin Bachry
-* Matt Madison
-* Matt Turner
-* Michael Ellerman
-* Michael Matz
-* Ralf Schmitt
-* Robie Basak
-* Ronny Pfannschmidt
-* Samual M. Rushing
-* Tony Bowles
-* Tony Breeds
-* Trevor Bowen
-* Tulio Magno Quites Machado Filho
-* Ulrich Weigand
-* Victor Stinner
diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/INSTALLER b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/INSTALLER
deleted file mode 100644
index a1b589e..0000000
--- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/INSTALLER
+++ /dev/null
@@ -1 +0,0 @@
-pip
diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/LICENSE b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/LICENSE
deleted file mode 100644
index b73a4a1..0000000
--- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/LICENSE
+++ /dev/null
@@ -1,30 +0,0 @@
-The following files are derived from Stackless Python and are subject to the
-same license as Stackless Python:
-
-    src/greenlet/slp_platformselect.h
-    files in src/greenlet/platform/ directory
-
-See LICENSE.PSF and http://www.stackless.com/ for details.
-
-Unless otherwise noted, the files in greenlet have been released under the
-following MIT license:
-
-Copyright (c) Armin Rigo, Christian Tismer and contributors
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.
diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/LICENSE.PSF b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/LICENSE.PSF
deleted file mode 100644
index d3b509a..0000000
--- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/LICENSE.PSF
+++ /dev/null
@@ -1,47 +0,0 @@
-PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
---------------------------------------------
-
-1. This LICENSE AGREEMENT is between the Python Software Foundation
-("PSF"), and the Individual or Organization ("Licensee") accessing and
-otherwise using this software ("Python") in source or binary form and
-its associated documentation.
-
-2. Subject to the terms and conditions of this License Agreement, PSF hereby
-grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce,
-analyze, test, perform and/or display publicly, prepare derivative works,
-distribute, and otherwise use Python alone or in any derivative version,
-provided, however, that PSF's License Agreement and PSF's notice of copyright,
-i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
-2011 Python Software Foundation; All Rights Reserved" are retained in Python
-alone or in any derivative version prepared by Licensee.
-
-3. 
In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/METADATA b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/METADATA deleted file mode 100644 index 1529410..0000000 --- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/METADATA +++ /dev/null @@ -1,103 +0,0 @@ -Metadata-Version: 2.1 -Name: greenlet -Version: 3.1.1 -Summary: Lightweight in-process concurrent programming -Home-page: https://greenlet.readthedocs.io/ -Author: Alexey Borzenkov -Author-email: snaury@gmail.com -Maintainer: Jason Madden -Maintainer-email: jason@seecoresoftware.com -License: MIT License -Project-URL: Bug Tracker, https://github.com/python-greenlet/greenlet/issues -Project-URL: Source Code, https://github.com/python-greenlet/greenlet/ -Project-URL: Documentation, https://greenlet.readthedocs.io/ -Keywords: greenlet coroutine concurrency threads cooperative -Platform: any -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Natural Language :: English -Classifier: Programming Language :: C -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Operating System :: OS Independent -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Requires-Python: >=3.7 -Description-Content-Type: text/x-rst -License-File: LICENSE -License-File: LICENSE.PSF -License-File: AUTHORS -Provides-Extra: docs -Requires-Dist: Sphinx ; extra == 'docs' -Requires-Dist: furo ; extra == 'docs' -Provides-Extra: test -Requires-Dist: objgraph ; extra == 
'test'
-Requires-Dist: psutil ; extra == 'test'
-
-.. This file is included into docs/history.rst
-
-
-Greenlets are lightweight coroutines for in-process concurrent
-programming.
-
-The "greenlet" package is a spin-off of `Stackless`_, a version of
-CPython that supports micro-threads called "tasklets". Tasklets run
-pseudo-concurrently (typically in a single or a few OS-level threads)
-and are synchronized with data exchanges on "channels".
-
-A "greenlet", on the other hand, is a still more primitive notion of
-micro-thread with no implicit scheduling; coroutines, in other words.
-This is useful when you want to control exactly when your code runs.
-You can build custom scheduled micro-threads on top of greenlet;
-however, it seems that greenlets are useful on their own as a way to
-make advanced control flow structures. For example, we can recreate
-generators; the difference with Python's own generators is that our
-generators can call nested functions and the nested functions can
-yield values too. (Additionally, you don't need a "yield" keyword. See
-the example in `test_generator.py
-<https://github.com/python-greenlet/greenlet/blob/master/src/greenlet/tests/test_generator.py>`_).
-
-Greenlets are provided as a C extension module for the regular unmodified
-interpreter.
-
-.. _`Stackless`: http://www.stackless.com
-
-
-Who is using Greenlet?
-======================
-
-There are several libraries that use Greenlet as a more flexible
-alternative to Python's built in coroutine support:
-
- - `Concurrence`_
- - `Eventlet`_
- - `Gevent`_
-
-.. _Concurrence: http://opensource.hyves.org/concurrence/
-.. _Eventlet: http://eventlet.net/
-.. _Gevent: http://www.gevent.org/
-
-Getting Greenlet
-================
-
-The easiest way to get Greenlet is to install it with pip::
-
-    pip install greenlet
-
-
-Source code archives and binary distributions are available on the
-python package index at https://pypi.org/project/greenlet
-
-The source code repository is hosted on github:
-https://github.com/python-greenlet/greenlet
-
-Documentation is available on readthedocs.org:
-https://greenlet.readthedocs.io
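The description above is abstract; a tiny producer/consumer sketch (ordinary upstream greenlet usage, not code from this repository) makes the "no implicit scheduling" point concrete and shows how ``switch()`` both transfers control and carries values::

    import greenlet

    def producer():
        consumer_glet.switch(1)  # resume consumer, delivering a value
        consumer_glet.switch(2)

    def consumer():
        while True:
            value = producer_glet.switch()  # run producer until it switches back
            print("got", value)

    producer_glet = greenlet.greenlet(producer)
    consumer_glet = greenlet.greenlet(consumer)
    consumer_glet.switch()  # prints "got 1", "got 2", then control returns to main

Nothing runs until something calls ``switch()``, so the interleaving is entirely explicit; when ``producer`` falls off the end, control passes to its parent (here, the main greenlet).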
diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/RECORD b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/RECORD
deleted file mode 100644
index 01640ea..0000000
--- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/RECORD
+++ /dev/null
@@ -1,122 +0,0 @@
-../../include/site/python3.12/greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755
-greenlet-3.1.1.dist-info/AUTHORS,sha256=swW28t2knVRxRkaEQNZtO7MP9Sgnompb7B6cNgJM8Gk,849
-greenlet-3.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
-greenlet-3.1.1.dist-info/LICENSE,sha256=dpgx1uXfrywggC-sz_H6-0wgJd2PYlPfpH_K1Z1NCXk,1434
-greenlet-3.1.1.dist-info/LICENSE.PSF,sha256=5f88I8EQ5JTNfXNsEP2W1GJFe6_soxCEDbZScpjH1Gs,2424
-greenlet-3.1.1.dist-info/METADATA,sha256=98pubBCLPmBVc3KVRmetjFyd8jA9e9BXhD_07cAcccc,3933
-greenlet-3.1.1.dist-info/RECORD,,
-greenlet-3.1.1.dist-info/WHEEL,sha256=3vidnDuZ-QSnHIxLhNbI1gIM-KgyEcMHuZuv1mWPd_Q,101
-greenlet-3.1.1.dist-info/top_level.txt,sha256=YSnRsCRoO61JGlP57o8iKL6rdLWDWuiyKD8ekpWUsDc,9
-greenlet/CObjects.cpp,sha256=OPej1bWBgc4sRrTRQ2aFFML9pzDYKlKhlJSjsI0X_eU,3508
-greenlet/PyGreenlet.cpp,sha256=ogWsQ5VhSdItWRLLpWOgSuqYuM3QwQ4cVCxOQIgHx6E,23441
-greenlet/PyGreenlet.hpp,sha256=2ZQlOxYNoy7QwD7mppFoOXe_At56NIsJ0eNsE_hoSsw,1463
-greenlet/PyGreenletUnswitchable.cpp,sha256=PQE0fSZa_IOyUM44IESHkJoD2KtGW3dkhkmZSYY3WHs,4375
-greenlet/PyModule.cpp,sha256=J2TH06dGcNEarioS6NbWXkdME8hJY05XVbdqLrfO5w4,8587
-greenlet/TBrokenGreenlet.cpp,sha256=smN26uC7ahAbNYiS10rtWPjCeTG4jevM8siA2sjJiXg,1021
-greenlet/TExceptionState.cpp,sha256=U7Ctw9fBdNraS0d174MoQW7bN-ae209Ta0JuiKpcpVI,1359
-greenlet/TGreenlet.cpp,sha256=HGYGKpmKYqQ842tASW-QaaV8wua4a5XV_quYKPDsV_Y,25731
-greenlet/TGreenlet.hpp,sha256=mMHcb_rSuozdDiGJjX3GgyYkWgVM4kuO1UgbUP84BlU,27869
-greenlet/TGreenletGlobals.cpp,sha256=YyEmDjKf1g32bsL-unIUScFLnnA1fzLWf2gOMd-D0Zw,3264
-greenlet/TMainGreenlet.cpp,sha256=fvgb8HHB-FVTPEKjR1s_ifCZSpp5D5YQByik0CnIABg,3276
-greenlet/TPythonState.cpp,sha256=FxRdi76lTGXaQKWwkq82VaCfIRdF2Z-fh-TlRTMjYqg,15359
-greenlet/TStackState.cpp,sha256=V444I8Jj9DhQz-9leVW_9dtiSRjaE1NMlgDG02Xxq-Y,7381
-greenlet/TThreadState.hpp,sha256=2Jgg7DtGggMYR_x3CLAvAFf1mIdIDtQvSSItcdmX4ZQ,19131
-greenlet/TThreadStateCreator.hpp,sha256=uYTexDWooXSSgUc5uh-Mhm5BQi3-kR6CqpizvNynBFQ,2610
-greenlet/TThreadStateDestroy.cpp,sha256=wt7lQwLI0mi_JtnZB_jB4bUmfCa5b6nQhA7XOmnI1yk,9568
-greenlet/TUserGreenlet.cpp,sha256=uemg0lwKXtYB0yzmvyYdIIAsKnNkifXM1OJ2OlrFP1A,23553
-greenlet/__init__.py,sha256=OOmvT6_vn_SekdPzkj4qm6hjfikXMmdNZYDmGTOaRNo,1723
-greenlet/__pycache__/__init__.cpython-312.pyc,,
-greenlet/_greenlet.cp312-win_amd64.pyd,sha256=JFaHsWu81i5kpGz46CSTSo6rBwIsDU6bjcN12nzBT9c,219136
-greenlet/greenlet.cpp,sha256=WdItb1yWL9WNsTqJNf0Iw8ZwDHD49pkDP0rIRGBg2pw,10996
-greenlet/greenlet.h,sha256=sz5pYRSQqedgOt2AMgxLZdTjO-qcr_JMvgiEJR9IAJ8,4755
-greenlet/greenlet_allocator.hpp,sha256=kxyWW4Qdwlrc7ufgdb5vd6Y7jhauQ699Kod0mqiO1iM,1582
-greenlet/greenlet_compiler_compat.hpp,sha256=nRxpLN9iNbnLVyFDeVmOwyeeNm6scQrOed1l7JQYMCM,4346
-greenlet/greenlet_cpython_add_pending.hpp,sha256=apAwIhGlgYrnYn03zWL6Sxy68kltDeb1e0QupZfb3DQ,6043
-greenlet/greenlet_cpython_compat.hpp,sha256=L_jig3dm2bsJWRazrhlokma2NfnwixoQ0cydshh6ce4,3964
-greenlet/greenlet_exceptions.hpp,sha256=06Bx81DtVaJTa6RtiMcV141b-XHv4ppEgVItkblcLWY,4503
-greenlet/greenlet_internal.hpp,sha256=Ajc-_09W4xWzm9XfyXHAeQAFUgKGKsnJwYsTCoNy3ns,2709
-greenlet/greenlet_refs.hpp,sha256=OnbA91yZf3QHH6-eJccvoNDAaN-pQBMMrclFU1Ot3J4,34436
-greenlet/greenlet_slp_switch.hpp,sha256=kM1QHA2iV-gH4cFyN6lfIagHQxvJZjWOVJdIxRE3TlQ,3198
-greenlet/greenlet_thread_support.hpp,sha256=XUJ6ljWjf9OYyuOILiz8e_yHvT3fbaUiHdhiPNQUV4s,867
-greenlet/platform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-greenlet/platform/__pycache__/__init__.cpython-312.pyc,,
-greenlet/platform/setup_switch_x64_masm.cmd,sha256=ZpClUJeU0ujEPSTWNSepP0W2f9XiYQKA8QKSoVou8EU,143
-greenlet/platform/switch_aarch64_gcc.h,sha256=GKC0yWNXnbK2X--X6aguRCMj2Tg7hDU1Zkl3RljDvC8,4307
-greenlet/platform/switch_alpha_unix.h,sha256=Z-SvF8JQV3oxWT8JRbL9RFu4gRFxPdJ7cviM8YayMmw,671
-greenlet/platform/switch_amd64_unix.h,sha256=EcSFCBlodEBhqhKjcJqY_5Dn_jn7pKpkJlOvp7gFXLI,2748
-greenlet/platform/switch_arm32_gcc.h,sha256=Z3KkHszdgq6uU4YN3BxvKMG2AdDnovwCCNrqGWZ1Lyo,2479
-greenlet/platform/switch_arm32_ios.h,sha256=mm5_R9aXB92hyxzFRwB71M60H6AlvHjrpTrc72Pz3l8,1892
-greenlet/platform/switch_arm64_masm.asm,sha256=4kpTtfy7rfcr8j1CpJLAK21EtZpGDAJXWRU68HEy5A8,1245
-greenlet/platform/switch_arm64_masm.obj,sha256=DmLnIB_icoEHAz1naue_pJPTZgR9ElM7-Nmztr-o9_U,746
-greenlet/platform/switch_arm64_msvc.h,sha256=RqK5MHLmXI3Q-FQ7tm32KWnbDNZKnkJdq8CR89cz640,398
-greenlet/platform/switch_csky_gcc.h,sha256=kDikyiPpewP71KoBZQO_MukDTXTXBiC7x-hF0_2DL0w,1331
-greenlet/platform/switch_loongarch64_linux.h,sha256=7M-Dhc4Q8tRbJCJhalDLwU6S9Mx8MjmN1RbTDgIvQTM,779
-greenlet/platform/switch_m68k_gcc.h,sha256=VSa6NpZhvyyvF-Q58CTIWSpEDo4FKygOyTz00whctlw,928
-greenlet/platform/switch_mips_unix.h,sha256=E0tYsqc5anDY1BhenU1l8DW-nVHC_BElzLgJw3TGtPk,1426 -greenlet/platform/switch_ppc64_aix.h,sha256=_BL0iyRr3ZA5iPlr3uk9SJ5sNRWGYLrXcZ5z-CE9anE,3860 -greenlet/platform/switch_ppc64_linux.h,sha256=0rriT5XyxPb0GqsSSn_bP9iQsnjsPbBmu0yqo5goSyQ,3815 -greenlet/platform/switch_ppc_aix.h,sha256=pHA4slEjUFP3J3SYm1TAlNPhgb2G_PAtax5cO8BEe1A,2941 -greenlet/platform/switch_ppc_linux.h,sha256=YwrlKUzxlXuiKMQqr6MFAV1bPzWnmvk6X1AqJZEpOWU,2759 -greenlet/platform/switch_ppc_macosx.h,sha256=Z6KN_ud0n6nC3ltJrNz2qtvER6vnRAVRNH9mdIDpMxY,2624 -greenlet/platform/switch_ppc_unix.h,sha256=-ZG7MSSPEA5N4qO9PQChtyEJ-Fm6qInhyZm_ZBHTtMg,2652 -greenlet/platform/switch_riscv_unix.h,sha256=Xg0wBen8Je21LWzFtLNLvUUYq6p9n_WY7AUQbiBVyyk,865 -greenlet/platform/switch_s390_unix.h,sha256=RRlGu957ybmq95qNNY4Qw1mcaoT3eBnW5KbVwu48KX8,2763 -greenlet/platform/switch_sh_gcc.h,sha256=mcRJBTu-2UBf4kZtX601qofwuDuy-Y-hnxJtrcaB7do,901 -greenlet/platform/switch_sparc_sun_gcc.h,sha256=xZish9GsMHBienUbUMsX1-ZZ-as7hs36sVhYIE3ew8Y,2797 -greenlet/platform/switch_x32_unix.h,sha256=nM98PKtzTWc1lcM7TRMUZJzskVdR1C69U1UqZRWX0GE,1509 -greenlet/platform/switch_x64_masm.asm,sha256=nu6n2sWyXuXfpPx40d9YmLfHXUc1sHgeTvX1kUzuvEM,1841 -greenlet/platform/switch_x64_masm.obj,sha256=GNtTNxYdo7idFUYsQv-mrXWgyT5EJ93-9q90lN6svtQ,1078 -greenlet/platform/switch_x64_msvc.h,sha256=LIeasyKo_vHzspdMzMHbosRhrBfKI4BkQOh4qcTHyJw,1805 -greenlet/platform/switch_x86_msvc.h,sha256=TtGOwinbFfnn6clxMNkCz8i6OmgB6kVRrShoF5iT9to,12838 -greenlet/platform/switch_x86_unix.h,sha256=VplW9H0FF0cZHw1DhJdIUs5q6YLS4cwb2nYwjF83R1s,3059 -greenlet/slp_platformselect.h,sha256=s-U-BrZ3qwwfI-6W9zWw2rb404OksZYbxYC2w5kSMXM,3841 -greenlet/tests/__init__.py,sha256=cj2-qpMXnlVRLbMLX-rPNNMVJ42ZssdxHd84NSQ3YXw,9246 -greenlet/tests/__pycache__/__init__.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_clearing_run_switches.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_cpp_exception.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_initialstub_already_started.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_slp_switch.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_switch_three_greenlets.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_switch_three_greenlets2.cpython-312.pyc,, -greenlet/tests/__pycache__/fail_switch_two_greenlets.cpython-312.pyc,, -greenlet/tests/__pycache__/leakcheck.cpython-312.pyc,, -greenlet/tests/__pycache__/test_contextvars.cpython-312.pyc,, -greenlet/tests/__pycache__/test_cpp.cpython-312.pyc,, -greenlet/tests/__pycache__/test_extension_interface.cpython-312.pyc,, -greenlet/tests/__pycache__/test_gc.cpython-312.pyc,, -greenlet/tests/__pycache__/test_generator.cpython-312.pyc,, -greenlet/tests/__pycache__/test_generator_nested.cpython-312.pyc,, -greenlet/tests/__pycache__/test_greenlet.cpython-312.pyc,, -greenlet/tests/__pycache__/test_greenlet_trash.cpython-312.pyc,, -greenlet/tests/__pycache__/test_leaks.cpython-312.pyc,, -greenlet/tests/__pycache__/test_stack_saved.cpython-312.pyc,, -greenlet/tests/__pycache__/test_throw.cpython-312.pyc,, -greenlet/tests/__pycache__/test_tracing.cpython-312.pyc,, -greenlet/tests/__pycache__/test_version.cpython-312.pyc,, -greenlet/tests/__pycache__/test_weakref.cpython-312.pyc,, -greenlet/tests/_test_extension.c,sha256=vkeGA-6oeJcGILsD7oIrT1qZop2GaTOHXiNT7mcSl-0,5773 -greenlet/tests/_test_extension.cp312-win_amd64.pyd,sha256=djrk1kpwg9yqun3OBVt6Gl85whNLs3uuHMBy4NqPTEA,14336 -greenlet/tests/_test_extension_cpp.cp312-win_amd64.pyd,sha256=UKalV0Ji6FV9UFI8HX4lM-Py-BO55KBMJNsPeouv0nE,15872 
-greenlet/tests/_test_extension_cpp.cpp,sha256=e0kVnaB8CCaEhE9yHtNyfqTjevsPDKKx-zgxk7PPK48,6565 -greenlet/tests/fail_clearing_run_switches.py,sha256=o433oA_nUCtOPaMEGc8VEhZIKa71imVHXFw7TsXaP8M,1263 -greenlet/tests/fail_cpp_exception.py,sha256=o_ZbipWikok8Bjc-vjiQvcb5FHh2nVW-McGKMLcMzh0,985 -greenlet/tests/fail_initialstub_already_started.py,sha256=txENn5IyzGx2p-XR1XB7qXmC8JX_4mKDEA8kYBXUQKc,1961 -greenlet/tests/fail_slp_switch.py,sha256=rJBZcZfTWR3e2ERQtPAud6YKShiDsP84PmwOJbp4ey0,524 -greenlet/tests/fail_switch_three_greenlets.py,sha256=zSitV7rkNnaoHYVzAGGLnxz-yPtohXJJzaE8ehFDQ0M,956 -greenlet/tests/fail_switch_three_greenlets2.py,sha256=FPJensn2EJxoropl03JSTVP3kgP33k04h6aDWWozrOk,1285 -greenlet/tests/fail_switch_two_greenlets.py,sha256=1CaI8s3504VbbF1vj1uBYuy-zxBHVzHPIAd1LIc8ONg,817 -greenlet/tests/leakcheck.py,sha256=inbfM7_oVzd8jIKGxCgo4JqpFZaDAnWPkSULJ8vIE1s,11964 -greenlet/tests/test_contextvars.py,sha256=0n5pR_lbpAppc5wFfK0e1SwYLM-fsSFp72B5_ArLPGE,10348 -greenlet/tests/test_cpp.py,sha256=hpxhFAdKJTpAVZP8CBGs1ZcrKdscI9BaDZk4btkI5d4,2736 -greenlet/tests/test_extension_interface.py,sha256=eJ3cwLacdK2WbsrC-4DgeyHdwLRcG4zx7rrkRtqSzC4,3829 -greenlet/tests/test_gc.py,sha256=PCOaRpIyjNnNlDogGL3FZU_lrdXuM-pv1rxeE5TP5mc,2923 -greenlet/tests/test_generator.py,sha256=tONXiTf98VGm347o1b-810daPiwdla5cbpFg6QI1R1g,1240 -greenlet/tests/test_generator_nested.py,sha256=7v4HOYrf1XZP39dk5IUMubdZ8yc3ynwZcqj9GUJyMSA,3718 -greenlet/tests/test_greenlet.py,sha256=zoAy56MtEyz5P93Iknpt2pPjNO3ePYrgM7SDE8Cw_uI,45990 -greenlet/tests/test_greenlet_trash.py,sha256=n2dBlQfOoEO1ODatFi8QdhboH3fB86YtqzcYMYOXxbw,7947 -greenlet/tests/test_leaks.py,sha256=wskLqCAvqZ3qTZkam_wXzd-E5zelUjlXS5Ss8KshtZY,17465 -greenlet/tests/test_stack_saved.py,sha256=eyzqNY2VCGuGlxhT_In6TvZ6Okb0AXFZVyBEnK1jDwA,446 -greenlet/tests/test_throw.py,sha256=u2TQ_WvvCd6N6JdXWIxVEcXkKu5fepDlz9dktYdmtng,3712 -greenlet/tests/test_tracing.py,sha256=VlwzMU0C1noospZhuUMyB7MHw200emIvGCN_6G2p2ZU,8250 -greenlet/tests/test_version.py,sha256=O9DpAITsOFgiRcjd4odQ7ejmwx_N9Q1zQENVcbtFHIc,1339 -greenlet/tests/test_weakref.py,sha256=F8M23btEF87bIbpptLNBORosbQqNZGiYeKMqYjWrsak,883 diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/WHEEL b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/WHEEL deleted file mode 100644 index e4b52df..0000000 --- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: setuptools (75.1.0) -Root-Is-Purelib: false -Tag: cp312-cp312-win_amd64 - diff --git a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/top_level.txt b/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/top_level.txt deleted file mode 100644 index 46725be..0000000 --- a/myenv/Lib/site-packages/greenlet-3.1.1.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -greenlet diff --git a/myenv/Lib/site-packages/greenlet/CObjects.cpp b/myenv/Lib/site-packages/greenlet/CObjects.cpp deleted file mode 100644 index c135995..0000000 --- a/myenv/Lib/site-packages/greenlet/CObjects.cpp +++ /dev/null @@ -1,157 +0,0 @@ -#ifndef COBJECTS_CPP -#define COBJECTS_CPP -/***************************************************************************** - * C interface - * - * These are exported using the CObject API - */ -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -#endif - -#include "greenlet_exceptions.hpp" - -#include "greenlet_internal.hpp" -#include "greenlet_refs.hpp" - - -#include "TThreadStateDestroy.cpp" - -#include "PyGreenlet.hpp" - -using 
greenlet::PyErrOccurred; -using greenlet::Require; - - - -extern "C" { -static PyGreenlet* -PyGreenlet_GetCurrent(void) -{ - return GET_THREAD_STATE().state().get_current().relinquish_ownership(); -} - -static int -PyGreenlet_SetParent(PyGreenlet* g, PyGreenlet* nparent) -{ - return green_setparent((PyGreenlet*)g, (PyObject*)nparent, NULL); -} - -static PyGreenlet* -PyGreenlet_New(PyObject* run, PyGreenlet* parent) -{ - using greenlet::refs::NewDictReference; - // In the past, we didn't use green_new and green_init, but that - // was a maintenance issue because we duplicated code. This way is - // much safer, but slightly slower. If that's a problem, we could - // refactor green_init to separate argument parsing from initialization. - OwnedGreenlet g = OwnedGreenlet::consuming(green_new(&PyGreenlet_Type, nullptr, nullptr)); - if (!g) { - return NULL; - } - - try { - NewDictReference kwargs; - if (run) { - kwargs.SetItem(mod_globs->str_run, run); - } - if (parent) { - kwargs.SetItem("parent", (PyObject*)parent); - } - - Require(green_init(g.borrow(), mod_globs->empty_tuple, kwargs.borrow())); - } - catch (const PyErrOccurred&) { - return nullptr; - } - - return g.relinquish_ownership(); -} - -static PyObject* -PyGreenlet_Switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return NULL; - } - - if (args == NULL) { - args = mod_globs->empty_tuple; - } - - if (kwargs == NULL || !PyDict_Check(kwargs)) { - kwargs = NULL; - } - - return green_switch(self, args, kwargs); -} - -static PyObject* -PyGreenlet_Throw(PyGreenlet* self, PyObject* typ, PyObject* val, PyObject* tb) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return nullptr; - } - try { - PyErrPieces err_pieces(typ, val, tb); - return internal_green_throw(self, err_pieces).relinquish_ownership(); - } - catch (const PyErrOccurred&) { - return nullptr; - } -} - - - -static int -Extern_PyGreenlet_MAIN(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return -1; - } - return self->pimpl->main(); -} - -static int -Extern_PyGreenlet_ACTIVE(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return -1; - } - return self->pimpl->active(); -} - -static int -Extern_PyGreenlet_STARTED(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return -1; - } - return self->pimpl->started(); -} - -static PyGreenlet* -Extern_PyGreenlet_GET_PARENT(PyGreenlet* self) -{ - if (!PyGreenlet_Check(self)) { - PyErr_BadArgument(); - return NULL; - } - // This can return NULL even if there is no exception - return self->pimpl->parent().acquire(); -} -} // extern C. - -/** End C API ****************************************************************/ -#ifdef __clang__ -# pragma clang diagnostic pop -#endif - - -#endif diff --git a/myenv/Lib/site-packages/greenlet/PyGreenlet.cpp b/myenv/Lib/site-packages/greenlet/PyGreenlet.cpp deleted file mode 100644 index 29c0bba..0000000 --- a/myenv/Lib/site-packages/greenlet/PyGreenlet.cpp +++ /dev/null @@ -1,738 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef PYGREENLET_CPP -#define PYGREENLET_CPP -/***************** -The Python slot functions for TGreenlet. 
- */
-
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include "structmember.h" // PyMemberDef
-
-#include "greenlet_internal.hpp"
-#include "TThreadStateDestroy.cpp"
-#include "TGreenlet.hpp"
-// #include "TUserGreenlet.cpp"
-// #include "TMainGreenlet.cpp"
-// #include "TBrokenGreenlet.cpp"
-
-
-#include "greenlet_refs.hpp"
-#include "greenlet_slp_switch.hpp"
-
-#include "greenlet_thread_support.hpp"
-#include "TGreenlet.hpp"
-
-#include "TGreenletGlobals.cpp"
-#include "TThreadStateDestroy.cpp"
-#include "PyGreenlet.hpp"
-// #include "TGreenlet.cpp"
-
-// #include "TExceptionState.cpp"
-// #include "TPythonState.cpp"
-// #include "TStackState.cpp"
-
-using greenlet::LockGuard;
-using greenlet::LockInitError;
-using greenlet::PyErrOccurred;
-using greenlet::Require;
-
-using greenlet::g_handle_exit;
-using greenlet::single_result;
-
-using greenlet::Greenlet;
-using greenlet::UserGreenlet;
-using greenlet::MainGreenlet;
-using greenlet::BrokenGreenlet;
-using greenlet::ThreadState;
-using greenlet::PythonState;
-
-
-
-static PyGreenlet*
-green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds))
-{
-    PyGreenlet* o =
-        (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict);
-    if (o) {
-        new UserGreenlet(o, GET_THREAD_STATE().state().borrow_current());
-        assert(Py_REFCNT(o) == 1);
-    }
-    return o;
-}
-
-
-// green_init is used in the tp_init slot. So it's important that
-// it can be called directly from CPython. Thus, we don't use
-// BorrowedGreenlet and BorrowedObject --- although in theory
-// these should be binary layout compatible, that may not be
-// guaranteed to be the case (32-bit linux ppc possibly).
-static int
-green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs)
-{
-    PyArgParseParam run;
-    PyArgParseParam nparent;
-    static const char* kwlist[] = {
-        "run",
-        "parent",
-        NULL
-    };
-
-    // recall: The O specifier does NOT increase the reference count.
-    if (!PyArg_ParseTupleAndKeywords(
-            args, kwargs, "|OO:green", (char**)kwlist, &run, &nparent)) {
-        return -1;
-    }
-
-    if (run) {
-        if (green_setrun(self, run, NULL)) {
-            return -1;
-        }
-    }
-    if (nparent && !nparent.is_None()) {
-        return green_setparent(self, nparent, NULL);
-    }
-    return 0;
-}
-
-
-
-static int
-green_traverse(PyGreenlet* self, visitproc visit, void* arg)
-{
-    // We must only visit referenced objects, i.e. only objects
-    // Py_INCREF'ed by this greenlet (directly or indirectly):
-    //
-    // - stack_prev is not visited: holds previous stack pointer, but it's not
-    //   referenced
-    // - frames are not visited as we don't strongly reference them;
-    //   alive greenlets are not garbage collected
-    //   anyway. This can be a problem, however, if this greenlet is
-    //   never allowed to finish, and is referenced from the frame: we
-    //   have an uncollectible cycle in that case. Note that the
-    //   frame object itself is also frequently not even tracked by the GC
-    //   starting with Python 3.7 (frames are allocated by the
-    //   interpreter untracked, and only become tracked when their
-    //   evaluation is finished if they have a refcount > 1). All of
-    //   this is to say that we should probably strongly reference
-    //   the frame object. Doing so, while always allowing GC on a
-    //   greenlet, solves several leaks for us.
-
-    Py_VISIT(self->dict);
-    if (!self->pimpl) {
-        // Hmm. I have seen this at interpreter shutdown time,
-        // I think. That's very odd because this doesn't go away until
-        // we're ``green_dealloc()``, at which point we shouldn't be
-        // traversed anymore.
-        return 0;
-    }
-
-    return self->pimpl->tp_traverse(visit, arg);
-}
-
-static int
-green_is_gc(PyObject* _self)
-{
-    BorrowedGreenlet self(_self);
-    int result = 0;
-    /* Main greenlet can be garbage collected since it can only
-       become unreachable if the underlying thread exited.
-       Active greenlets --- including those that are suspended ---
-       cannot be garbage collected, however.
-    */
-    if (self->main() || !self->active()) {
-        result = 1;
-    }
-    // The main greenlet pointer will eventually go away after the thread dies.
-    if (self->was_running_in_dead_thread()) {
-        // Our thread is dead! We can never run again. Might as well
-        // GC us. Note that if a tuple containing only us and other
-        // immutable objects had been scanned before this, when we
-        // would have returned 0, the tuple will take itself out of GC
-        // tracking and never be investigated again. So that could
-        // result in both us and the tuple leaking due to an
-        // unreachable/uncollectible reference. The same goes for
-        // dictionaries.
-        //
-        // It's not a great idea to be changing our GC state on the
-        // fly.
-        result = 1;
-    }
-    return result;
-}
-
-
-static int
-green_clear(PyGreenlet* self)
-{
-    /* Greenlet is only cleared if it is about to be collected.
-       Since active greenlets are not garbage collectable, we can
-       be sure that, even if they are deallocated during clear,
-       nothing they reference is in unreachable or finalizers,
-       so even if it switches we are relatively safe. */
-    // XXX: Are we responsible for clearing weakrefs here?
-    Py_CLEAR(self->dict);
-    return self->pimpl->tp_clear();
-}
-
-/**
- * Returns 0 on failure (the object was resurrected) or 1 on success.
- **/
-static int
-_green_dealloc_kill_started_non_main_greenlet(BorrowedGreenlet self)
-{
-    /* Hacks hacks hacks copied from instance_dealloc() */
-    /* Temporarily resurrect the greenlet. */
-    assert(self.REFCNT() == 0);
-    Py_SET_REFCNT(self.borrow(), 1);
-    /* Save the current exception, if any. */
-    PyErrPieces saved_err;
-    try {
-        // BY THE TIME WE GET HERE, the state may actually be going
-        // away
-        // if we're shutting down the interpreter and freeing thread
-        // entries,
-        // this could result in freeing greenlets that were leaked. So
-        // we can't try to read the state.
-        self->deallocing_greenlet_in_thread(
-            self->thread_state()
-            ? static_cast<ThreadState*>(GET_THREAD_STATE())
-            : nullptr);
-    }
-    catch (const PyErrOccurred&) {
-        PyErr_WriteUnraisable(self.borrow_o());
-        /* XXX what else should we do? */
-    }
-    /* Check for no resurrection must be done while we keep
-     * our internal reference, otherwise PyFile_WriteObject
-     * causes recursion if using Py_INCREF/Py_DECREF
-     */
-    if (self.REFCNT() == 1 && self->active()) {
-        /* Not resurrected, but still not dead!
-           XXX what else should we do? we complain. */
-        PyObject* f = PySys_GetObject("stderr");
-        Py_INCREF(self.borrow_o()); /* leak! */
-        if (f != NULL) {
-            PyFile_WriteString("GreenletExit did not kill ", f);
-            PyFile_WriteObject(self.borrow_o(), f, 0);
-            PyFile_WriteString("\n", f);
-        }
-    }
-    /* Restore the saved exception. */
-    saved_err.PyErrRestore();
-    /* Undo the temporary resurrection; can't use DECREF here,
-     * it would cause a recursive call.
-     */
-    assert(self.REFCNT() > 0);
-
-    Py_ssize_t refcnt = self.REFCNT() - 1;
-    Py_SET_REFCNT(self.borrow_o(), refcnt);
-    if (refcnt != 0) {
-        /* Resurrected!
*/ - _Py_NewReference(self.borrow_o()); - Py_SET_REFCNT(self.borrow_o(), refcnt); - /* Better to use tp_finalizer slot (PEP 442) - * and call ``PyObject_CallFinalizerFromDealloc``, - * but that's only supported in Python 3.4+; see - * Modules/_io/iobase.c for an example. - * - * The following approach is copied from iobase.c in CPython 2.7. - * (along with much of this function in general). Here's their - * comment: - * - * When called from a heap type's dealloc, the type will be - * decref'ed on return (see e.g. subtype_dealloc in typeobject.c). */ - if (PyType_HasFeature(self.TYPE(), Py_TPFLAGS_HEAPTYPE)) { - Py_INCREF(self.TYPE()); - } - - PyObject_GC_Track((PyObject*)self); - - _Py_DEC_REFTOTAL; -#ifdef COUNT_ALLOCS - --Py_TYPE(self)->tp_frees; - --Py_TYPE(self)->tp_allocs; -#endif /* COUNT_ALLOCS */ - return 0; - } - return 1; -} - - -static void -green_dealloc(PyGreenlet* self) -{ - PyObject_GC_UnTrack(self); - BorrowedGreenlet me(self); - if (me->active() - && me->started() - && !me->main()) { - if (!_green_dealloc_kill_started_non_main_greenlet(me)) { - return; - } - } - - if (self->weakreflist != NULL) { - PyObject_ClearWeakRefs((PyObject*)self); - } - Py_CLEAR(self->dict); - - if (self->pimpl) { - // In case deleting this, which frees some memory, - // somehow winds up calling back into us. That's usually a - //bug in our code. - Greenlet* p = self->pimpl; - self->pimpl = nullptr; - delete p; - } - // and finally we're done. self is now invalid. - Py_TYPE(self)->tp_free((PyObject*)self); -} - - - -static OwnedObject -internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces) -{ - PyObject* result = nullptr; - err_pieces.PyErrRestore(); - assert(PyErr_Occurred()); - if (self->started() && !self->active()) { - /* dead greenlet: turn GreenletExit into a regular return */ - result = g_handle_exit(OwnedObject()).relinquish_ownership(); - } - self->args() <<= result; - - return single_result(self->g_switch()); -} - - - -PyDoc_STRVAR( - green_switch_doc, - "switch(*args, **kwargs)\n" - "\n" - "Switch execution to this greenlet.\n" - "\n" - "If this greenlet has never been run, then this greenlet\n" - "will be switched to using the body of ``self.run(*args, **kwargs)``.\n" - "\n" - "If the greenlet is active (has been run, but was switch()'ed\n" - "out before leaving its run function), then this greenlet will\n" - "be resumed and the return value to its switch call will be\n" - "None if no arguments are given, the given argument if one\n" - "argument is given, or the args tuple and keyword args dict if\n" - "multiple arguments are given.\n" - "\n" - "If the greenlet is dead, or is the current greenlet then this\n" - "function will simply return the arguments using the same rules as\n" - "above.\n"); - -static PyObject* -green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs) -{ - using greenlet::SwitchingArgs; - SwitchingArgs switch_args(OwnedObject::owning(args), OwnedObject::owning(kwargs)); - self->pimpl->may_switch_away(); - self->pimpl->args() <<= switch_args; - - // If we're switching out of a greenlet, and that switch is the - // last thing the greenlet does, the greenlet ought to be able to - // go ahead and die at that point. Currently, someone else must - // manually switch back to the greenlet so that we "fall off the - // end" and can perform cleanup. You'd think we'd be able to - // figure out that this is happening using the frame's ``f_lasti`` - // member, which is supposed to be an index into - // ``frame->f_code->co_code``, the bytecode string. 
However, in - // recent interpreters, ``f_lasti`` tends not to be updated thanks - // to things like the PREDICT() macros in ceval.c. So it doesn't - // really work to do that in many cases. For example, the Python - // code: - // def run(): - // greenlet.getcurrent().parent.switch() - // produces bytecode of len 16, with the actual call to switch() - // being at index 10 (in Python 3.10). However, the reported - // ``f_lasti`` we actually see is...5! (Which happens to be the - // second byte of the CALL_METHOD op for ``getcurrent()``). - - try { - //OwnedObject result = single_result(self->pimpl->g_switch()); - OwnedObject result(single_result(self->pimpl->g_switch())); -#ifndef NDEBUG - // Note that the current greenlet isn't necessarily self. If self - // finished, we went to one of its parents. - assert(!self->pimpl->args()); - - const BorrowedGreenlet& current = GET_THREAD_STATE().state().borrow_current(); - // It's possible it's never been switched to. - assert(!current->args()); -#endif - PyObject* p = result.relinquish_ownership(); - - if (!p && !PyErr_Occurred()) { - // This shouldn't be happening anymore, so the asserts - // are there for debug builds. Non-debug builds - // crash "gracefully" in this case, although there is an - // argument to be made for killing the process in all - // cases --- for this to be the case, our switches - // probably nested in an incorrect way, so the state is - // suspicious. Nothing should be corrupt though, just - // confused at the Python level. Letting this propagate is - // probably good enough. - assert(p || PyErr_Occurred()); - throw PyErrOccurred( - mod_globs->PyExc_GreenletError, - "Greenlet.switch() returned NULL without an exception set." - ); - } - return p; - } - catch(const PyErrOccurred&) { - return nullptr; - } -} - -PyDoc_STRVAR( - green_throw_doc, - "Switches execution to this greenlet, but immediately raises the\n" - "given exception in this greenlet. If no argument is provided, the " - "exception\n" - "defaults to `greenlet.GreenletExit`. The normal exception\n" - "propagation rules apply, as described for `switch`. Note that calling " - "this\n" - "method is almost equivalent to the following::\n" - "\n" - " def raiser():\n" - " raise typ, val, tb\n" - " g_raiser = greenlet(raiser, parent=g)\n" - " g_raiser.switch()\n" - "\n" - "except that this trick does not work for the\n" - "`greenlet.GreenletExit` exception, which would not propagate\n" - "from ``g_raiser`` to ``g``.\n"); - -static PyObject* -green_throw(PyGreenlet* self, PyObject* args) -{ - PyArgParseParam typ(mod_globs->PyExc_GreenletExit); - PyArgParseParam val; - PyArgParseParam tb; - - if (!PyArg_ParseTuple(args, "|OOO:throw", &typ, &val, &tb)) { - return nullptr; - } - - assert(typ.borrow() || val.borrow()); - - self->pimpl->may_switch_away(); - try { - // Both normalizing the error and the actual throw_greenlet - // could throw PyErrOccurred. - PyErrPieces err_pieces(typ.borrow(), val.borrow(), tb.borrow()); - - return internal_green_throw(self, err_pieces).relinquish_ownership(); - } - catch (const PyErrOccurred&) { - return nullptr; - } -} - -static int -green_bool(PyGreenlet* self) -{ - return self->pimpl->active(); -} - -/** - * CAUTION: Allocates memory, may run GC and arbitrary Python code. 
- */
-static PyObject*
-green_getdict(PyGreenlet* self, void* UNUSED(context))
-{
-    if (self->dict == NULL) {
-        self->dict = PyDict_New();
-        if (self->dict == NULL) {
-            return NULL;
-        }
-    }
-    Py_INCREF(self->dict);
-    return self->dict;
-}
-
-static int
-green_setdict(PyGreenlet* self, PyObject* val, void* UNUSED(context))
-{
-    PyObject* tmp;
-
-    if (val == NULL) {
-        PyErr_SetString(PyExc_TypeError, "__dict__ may not be deleted");
-        return -1;
-    }
-    if (!PyDict_Check(val)) {
-        PyErr_SetString(PyExc_TypeError, "__dict__ must be a dictionary");
-        return -1;
-    }
-    tmp = self->dict;
-    Py_INCREF(val);
-    self->dict = val;
-    Py_XDECREF(tmp);
-    return 0;
-}
-
-static bool
-_green_not_dead(BorrowedGreenlet self)
-{
-    // XXX: Where else should we do this?
-    // Probably on entry to most Python-facing functions?
-    if (self->was_running_in_dead_thread()) {
-        self->deactivate_and_free();
-        return false;
-    }
-    return self->active() || !self->started();
-}
-
-
-static PyObject*
-green_getdead(PyGreenlet* self, void* UNUSED(context))
-{
-    if (_green_not_dead(self)) {
-        Py_RETURN_FALSE;
-    }
-    else {
-        Py_RETURN_TRUE;
-    }
-}
-
-static PyObject*
-green_get_stack_saved(PyGreenlet* self, void* UNUSED(context))
-{
-    return PyLong_FromSsize_t(self->pimpl->stack_saved());
-}
-
-
-static PyObject*
-green_getrun(PyGreenlet* self, void* UNUSED(context))
-{
-    try {
-        OwnedObject result(BorrowedGreenlet(self)->run());
-        return result.relinquish_ownership();
-    }
-    catch(const PyErrOccurred&) {
-        return nullptr;
-    }
-}
-
-
-static int
-green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context))
-{
-    try {
-        BorrowedGreenlet(self)->run(nrun);
-        return 0;
-    }
-    catch(const PyErrOccurred&) {
-        return -1;
-    }
-}
-
-static PyObject*
-green_getparent(PyGreenlet* self, void* UNUSED(context))
-{
-    return BorrowedGreenlet(self)->parent().acquire_or_None();
-}
-
-
-static int
-green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context))
-{
-    try {
-        BorrowedGreenlet(self)->parent(nparent);
-    }
-    catch(const PyErrOccurred&) {
-        return -1;
-    }
-    return 0;
-}
-
-
-static PyObject*
-green_getcontext(const PyGreenlet* self, void* UNUSED(context))
-{
-    const Greenlet *const g = self->pimpl;
-    try {
-        OwnedObject result(g->context());
-        return result.relinquish_ownership();
-    }
-    catch(const PyErrOccurred&) {
-        return nullptr;
-    }
-}
-
-static int
-green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context))
-{
-    try {
-        BorrowedGreenlet(self)->context(nctx);
-        return 0;
-    }
-    catch(const PyErrOccurred&) {
-        return -1;
-    }
-}
-
-
-static PyObject*
-green_getframe(PyGreenlet* self, void* UNUSED(context))
-{
-    const PythonState::OwnedFrame& top_frame = BorrowedGreenlet(self)->top_frame();
-    return top_frame.acquire_or_None();
-}
-
-
-static PyObject*
-green_getstate(PyGreenlet* self)
-{
-    PyErr_Format(PyExc_TypeError,
-                 "cannot serialize '%s' object",
-                 Py_TYPE(self)->tp_name);
-    return nullptr;
-}
-
-static PyObject*
-green_repr(PyGreenlet* _self)
-{
-    BorrowedGreenlet self(_self);
-    /*
-      Return a string like
-      <greenlet.greenlet object at 0x... (otid=0x...) current active started>
-
-      The handling of greenlets across threads is not super good.
-      We mostly use the internal definitions of these terms, but they
-      generally should make sense to users as well.
-    */
-    PyObject* result;
-    int never_started = !self->started() && !self->active();
-
-    const char* const tp_name = Py_TYPE(self)->tp_name;
-
-    if (_green_not_dead(self)) {
-        /* XXX: The otid= is almost useless because you can't correlate it to
-         any thread identifier exposed to Python.
We could use - PyThreadState_GET()->thread_id, but we'd need to save that in the - greenlet, or save the whole PyThreadState object itself. - - As it stands, its only useful for identifying greenlets from the same thread. - */ - const char* state_in_thread; - if (self->was_running_in_dead_thread()) { - // The thread it was running in is dead! - // This can happen, especially at interpreter shut down. - // It complicates debugging output because it may be - // impossible to access the current thread state at that - // time. Thus, don't access the current thread state. - state_in_thread = " (thread exited)"; - } - else { - state_in_thread = GET_THREAD_STATE().state().is_current(self) - ? " current" - : (self->started() ? " suspended" : ""); - } - result = PyUnicode_FromFormat( - "<%s object at %p (otid=%p)%s%s%s%s>", - tp_name, - self.borrow_o(), - self->thread_state(), - state_in_thread, - self->active() ? " active" : "", - never_started ? " pending" : " started", - self->main() ? " main" : "" - ); - } - else { - result = PyUnicode_FromFormat( - "<%s object at %p (otid=%p) %sdead>", - tp_name, - self.borrow_o(), - self->thread_state(), - self->was_running_in_dead_thread() - ? "(thread exited) " - : "" - ); - } - - return result; -} - - -static PyMethodDef green_methods[] = { - { - .ml_name="switch", - .ml_meth=reinterpret_cast(green_switch), - .ml_flags=METH_VARARGS | METH_KEYWORDS, - .ml_doc=green_switch_doc - }, - {.ml_name="throw", .ml_meth=(PyCFunction)green_throw, .ml_flags=METH_VARARGS, .ml_doc=green_throw_doc}, - {.ml_name="__getstate__", .ml_meth=(PyCFunction)green_getstate, .ml_flags=METH_NOARGS, .ml_doc=NULL}, - {.ml_name=NULL, .ml_meth=NULL} /* sentinel */ -}; - -static PyGetSetDef green_getsets[] = { - /* name, getter, setter, doc, context pointer */ - {.name="__dict__", .get=(getter)green_getdict, .set=(setter)green_setdict}, - {.name="run", .get=(getter)green_getrun, .set=(setter)green_setrun}, - {.name="parent", .get=(getter)green_getparent, .set=(setter)green_setparent}, - {.name="gr_frame", .get=(getter)green_getframe }, - { - .name="gr_context", - .get=(getter)green_getcontext, - .set=(setter)green_setcontext - }, - {.name="dead", .get=(getter)green_getdead}, - {.name="_stack_saved", .get=(getter)green_get_stack_saved}, - {.name=NULL} -}; - -static PyMemberDef green_members[] = { - {.name=NULL} -}; - -static PyNumberMethods green_as_number = { - .nb_bool=(inquiry)green_bool, -}; - - -PyTypeObject PyGreenlet_Type = { - .ob_base=PyVarObject_HEAD_INIT(NULL, 0) - .tp_name="greenlet.greenlet", /* tp_name */ - .tp_basicsize=sizeof(PyGreenlet), /* tp_basicsize */ - /* methods */ - .tp_dealloc=(destructor)green_dealloc, /* tp_dealloc */ - .tp_repr=(reprfunc)green_repr, /* tp_repr */ - .tp_as_number=&green_as_number, /* tp_as _number*/ - .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */ - .tp_doc="greenlet(run=None, parent=None) -> greenlet\n\n" - "Creates a new greenlet object (without running it).\n\n" - " - *run* -- The callable to invoke.\n" - " - *parent* -- The parent greenlet. 
The default is the current "
-            "greenlet.", /* tp_doc */
-    .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */
-    .tp_clear=(inquiry)green_clear, /* tp_clear */
-    .tp_weaklistoffset=offsetof(PyGreenlet, weakreflist), /* tp_weaklistoffset */
-
-    .tp_methods=green_methods, /* tp_methods */
-    .tp_members=green_members, /* tp_members */
-    .tp_getset=green_getsets, /* tp_getset */
-    .tp_dictoffset=offsetof(PyGreenlet, dict), /* tp_dictoffset */
-    .tp_init=(initproc)green_init, /* tp_init */
-    .tp_alloc=PyType_GenericAlloc, /* tp_alloc */
-    .tp_new=(newfunc)green_new, /* tp_new */
-    .tp_free=PyObject_GC_Del, /* tp_free */
-    .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */
-};
-
-#endif
-
-// Local Variables:
-// flycheck-clang-include-path: ("/opt/local/Library/Frameworks/Python.framework/Versions/3.8/include/python3.8")
-// End:
diff --git a/myenv/Lib/site-packages/greenlet/PyGreenlet.hpp b/myenv/Lib/site-packages/greenlet/PyGreenlet.hpp
deleted file mode 100644
index df6cd80..0000000
--- a/myenv/Lib/site-packages/greenlet/PyGreenlet.hpp
+++ /dev/null
@@ -1,35 +0,0 @@
-#ifndef PYGREENLET_HPP
-#define PYGREENLET_HPP
-
-
-#include "greenlet.h"
-#include "greenlet_compiler_compat.hpp"
-#include "greenlet_refs.hpp"
-
-
-using greenlet::refs::OwnedGreenlet;
-using greenlet::refs::BorrowedGreenlet;
-using greenlet::refs::BorrowedObject;
-using greenlet::refs::OwnedObject;
-using greenlet::refs::PyErrPieces;
-
-
-// XXX: This doesn't really belong here, it's not a Python slot.
-static OwnedObject internal_green_throw(BorrowedGreenlet self, PyErrPieces& err_pieces);
-
-static PyGreenlet* green_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds));
-static int green_clear(PyGreenlet* self);
-static int green_init(PyGreenlet* self, PyObject* args, PyObject* kwargs);
-static int green_setparent(PyGreenlet* self, PyObject* nparent, void* UNUSED(context));
-static int green_setrun(PyGreenlet* self, PyObject* nrun, void* UNUSED(context));
-static int green_traverse(PyGreenlet* self, visitproc visit, void* arg);
-static void green_dealloc(PyGreenlet* self);
-static PyObject* green_getparent(PyGreenlet* self, void* UNUSED(context));
-
-static int green_is_gc(PyObject* self);
-static PyObject* green_getdead(PyGreenlet* self, void* UNUSED(context));
-static PyObject* green_getrun(PyGreenlet* self, void* UNUSED(context));
-static int green_setcontext(PyGreenlet* self, PyObject* nctx, void* UNUSED(context));
-static PyObject* green_getframe(PyGreenlet* self, void* UNUSED(context));
-static PyObject* green_repr(PyGreenlet* self);
-#endif
diff --git a/myenv/Lib/site-packages/greenlet/PyGreenletUnswitchable.cpp b/myenv/Lib/site-packages/greenlet/PyGreenletUnswitchable.cpp
deleted file mode 100644
index 1b768ee..0000000
--- a/myenv/Lib/site-packages/greenlet/PyGreenletUnswitchable.cpp
+++ /dev/null
@@ -1,147 +0,0 @@
-/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */
-/**
-   Implementation of the Python slots for PyGreenletUnswitchable_Type
-*/
-#ifndef PY_GREENLET_UNSWITCHABLE_CPP
-#define PY_GREENLET_UNSWITCHABLE_CPP
-
-
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include "structmember.h" // PyMemberDef
-
-#include "greenlet_internal.hpp"
-// Code after this point can assume access to things declared in stdint.h,
-// including the fixed-width types. This goes for the platform-specific switch functions
-// as well.
-#include "greenlet_refs.hpp"
-#include "greenlet_slp_switch.hpp"
-
-#include "greenlet_thread_support.hpp"
-#include "TGreenlet.hpp"
-
-#include "TGreenlet.cpp"
-#include "TGreenletGlobals.cpp"
-#include "TThreadStateDestroy.cpp"
-
-
-using greenlet::LockGuard;
-using greenlet::LockInitError;
-using greenlet::PyErrOccurred;
-using greenlet::Require;
-
-using greenlet::g_handle_exit;
-using greenlet::single_result;
-
-using greenlet::Greenlet;
-using greenlet::UserGreenlet;
-using greenlet::MainGreenlet;
-using greenlet::BrokenGreenlet;
-using greenlet::ThreadState;
-using greenlet::PythonState;
-
-
-#include "PyGreenlet.hpp"
-
-static PyGreenlet*
-green_unswitchable_new(PyTypeObject* type, PyObject* UNUSED(args), PyObject* UNUSED(kwds))
-{
-    PyGreenlet* o =
-        (PyGreenlet*)PyBaseObject_Type.tp_new(type, mod_globs->empty_tuple, mod_globs->empty_dict);
-    if (o) {
-        new BrokenGreenlet(o, GET_THREAD_STATE().state().borrow_current());
-        assert(Py_REFCNT(o) == 1);
-    }
-    return o;
-}
-
-static PyObject*
-green_unswitchable_getforce(PyGreenlet* self, void* UNUSED(context))
-{
-    BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
-    return PyBool_FromLong(broken->_force_switch_error);
-}
-
-static int
-green_unswitchable_setforce(PyGreenlet* self, PyObject* nforce, void* UNUSED(context))
-{
-    if (!nforce) {
-        PyErr_SetString(
-            PyExc_AttributeError,
-            "Cannot delete force_switch_error"
-        );
-        return -1;
-    }
-    BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
-    int is_true = PyObject_IsTrue(nforce);
-    if (is_true == -1) {
-        return -1;
-    }
-    broken->_force_switch_error = is_true;
-    return 0;
-}
-
-static PyObject*
-green_unswitchable_getforceslp(PyGreenlet* self, void* UNUSED(context))
-{
-    BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
-    return PyBool_FromLong(broken->_force_slp_switch_error);
-}
-
-static int
-green_unswitchable_setforceslp(PyGreenlet* self, PyObject* nforce, void* UNUSED(context))
-{
-    if (!nforce) {
-        PyErr_SetString(
-            PyExc_AttributeError,
-            "Cannot delete force_slp_switch_error"
-        );
-        return -1;
-    }
-    BrokenGreenlet* broken = dynamic_cast<BrokenGreenlet*>(self->pimpl);
-    int is_true = PyObject_IsTrue(nforce);
-    if (is_true == -1) {
-        return -1;
-    }
-    broken->_force_slp_switch_error = is_true;
-    return 0;
-}
-
-static PyGetSetDef green_unswitchable_getsets[] = {
-    /* name, getter, setter, doc, closure (context pointer) */
-    {
-        .name="force_switch_error",
-        .get=(getter)green_unswitchable_getforce,
-        .set=(setter)green_unswitchable_setforce,
-        .doc=NULL
-    },
-    {
-        .name="force_slp_switch_error",
-        .get=(getter)green_unswitchable_getforceslp,
-        .set=(setter)green_unswitchable_setforceslp,
-        .doc=nullptr
-    },
-    {.name=nullptr}
-};
-
-PyTypeObject PyGreenletUnswitchable_Type = {
-    .ob_base=PyVarObject_HEAD_INIT(NULL, 0)
-    .tp_name="greenlet._greenlet.UnswitchableGreenlet",
-    .tp_dealloc=(destructor)green_dealloc, /* tp_dealloc */
-    .tp_flags=G_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, /* tp_flags */
-    .tp_doc="Undocumented internal class", /* tp_doc */
-    .tp_traverse=(traverseproc)green_traverse, /* tp_traverse */
-    .tp_clear=(inquiry)green_clear, /* tp_clear */
-
-    .tp_getset=green_unswitchable_getsets, /* tp_getset */
-    .tp_base=&PyGreenlet_Type, /* tp_base */
-    .tp_init=(initproc)green_init, /* tp_init */
-    .tp_alloc=PyType_GenericAlloc, /* tp_alloc */
-    .tp_new=(newfunc)green_unswitchable_new, /* tp_new */
-    .tp_free=PyObject_GC_Del, /* tp_free */
-    .tp_is_gc=(inquiry)green_is_gc, /* tp_is_gc */
-};
-
-
-#endif
b/myenv/Lib/site-packages/greenlet/PyModule.cpp deleted file mode 100644 index 6adcb5c..0000000 --- a/myenv/Lib/site-packages/greenlet/PyModule.cpp +++ /dev/null @@ -1,292 +0,0 @@ -#ifndef PY_MODULE_CPP -#define PY_MODULE_CPP - -#include "greenlet_internal.hpp" - - -#include "TGreenletGlobals.cpp" -#include "TMainGreenlet.cpp" -#include "TThreadStateDestroy.cpp" - -using greenlet::LockGuard; -using greenlet::ThreadState; - -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -# pragma clang diagnostic ignored "-Wunused-variable" -#endif - -PyDoc_STRVAR(mod_getcurrent_doc, - "getcurrent() -> greenlet\n" - "\n" - "Returns the current greenlet (i.e. the one which called this " - "function).\n"); - -static PyObject* -mod_getcurrent(PyObject* UNUSED(module)) -{ - return GET_THREAD_STATE().state().get_current().relinquish_ownership_o(); -} - -PyDoc_STRVAR(mod_settrace_doc, - "settrace(callback) -> object\n" - "\n" - "Sets a new tracing function and returns the previous one.\n"); -static PyObject* -mod_settrace(PyObject* UNUSED(module), PyObject* args) -{ - PyArgParseParam tracefunc; - if (!PyArg_ParseTuple(args, "O", &tracefunc)) { - return NULL; - } - ThreadState& state = GET_THREAD_STATE(); - OwnedObject previous = state.get_tracefunc(); - if (!previous) { - previous = Py_None; - } - - state.set_tracefunc(tracefunc); - - return previous.relinquish_ownership(); -} - -PyDoc_STRVAR(mod_gettrace_doc, - "gettrace() -> object\n" - "\n" - "Returns the currently set tracing function, or None.\n"); - -static PyObject* -mod_gettrace(PyObject* UNUSED(module)) -{ - OwnedObject tracefunc = GET_THREAD_STATE().state().get_tracefunc(); - if (!tracefunc) { - tracefunc = Py_None; - } - return tracefunc.relinquish_ownership(); -} - - - -PyDoc_STRVAR(mod_set_thread_local_doc, - "set_thread_local(key, value) -> None\n" - "\n" - "Set a value in the current thread-local dictionary. Debugging only.\n"); - -static PyObject* -mod_set_thread_local(PyObject* UNUSED(module), PyObject* args) -{ - PyArgParseParam key; - PyArgParseParam value; - PyObject* result = NULL; - - if (PyArg_UnpackTuple(args, "set_thread_local", 2, 2, &key, &value)) { - if(PyDict_SetItem( - PyThreadState_GetDict(), // borrow - key, - value) == 0 ) { - // success - Py_INCREF(Py_None); - result = Py_None; - } - } - return result; -} - -PyDoc_STRVAR(mod_get_pending_cleanup_count_doc, - "get_pending_cleanup_count() -> Integer\n" - "\n" - "Get the number of greenlet cleanup operations pending. Testing only.\n"); - - -static PyObject* -mod_get_pending_cleanup_count(PyObject* UNUSED(module)) -{ - LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); - return PyLong_FromSize_t(mod_globs->thread_states_to_destroy.size()); -} - -PyDoc_STRVAR(mod_get_total_main_greenlets_doc, - "get_total_main_greenlets() -> Integer\n" - "\n" - "Quickly return the number of main greenlets that exist. Testing only.\n"); - -static PyObject* -mod_get_total_main_greenlets(PyObject* UNUSED(module)) -{ - return PyLong_FromSize_t(G_TOTAL_MAIN_GREENLETS); -} - - - -PyDoc_STRVAR(mod_get_clocks_used_doing_optional_cleanup_doc, - "get_clocks_used_doing_optional_cleanup() -> Integer\n" - "\n" - "Get the number of clock ticks the program has used doing optional " - "greenlet cleanup.\n" - "Beginning in greenlet 2.0, greenlet tries to find and dispose of greenlets\n" - "that leaked after a thread exited. 
This requires invoking Python's garbage collector,\n" - "which may have a performance cost proportional to the number of live objects.\n" - "This function returns the amount of processor time\n" - "greenlet has used to do this. In programs that run with very large amounts of live\n" - "objects, this metric can be used to decide whether the cost of doing this cleanup\n" - "is worth the memory leak being corrected. If not, you can disable the cleanup\n" - "using ``enable_optional_cleanup(False)``.\n" - "The units are arbitrary and can only be compared to themselves (similarly to ``time.clock()``);\n" - "for example, to see how it scales with your heap. You can attempt to convert them into seconds\n" - "by dividing by the value of CLOCKS_PER_SEC.\n" - "If cleanup has been disabled, returns None." - "\n" - "This is an implementation-specific, provisional API. It may be changed or removed\n" - "in the future.\n" - ".. versionadded:: 2.0" - ); -static PyObject* -mod_get_clocks_used_doing_optional_cleanup(PyObject* UNUSED(module)) -{ - std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); - - if (clocks == std::clock_t(-1)) { - Py_RETURN_NONE; - } - // This might not actually work on some implementations; clock_t - // is an opaque type. - return PyLong_FromSsize_t(clocks); -} - -PyDoc_STRVAR(mod_enable_optional_cleanup_doc, - "enable_optional_cleanup(bool) -> None\n" - "\n" - "Enable or disable optional cleanup operations.\n" - "See ``get_clocks_used_doing_optional_cleanup()`` for details.\n" - ); -static PyObject* -mod_enable_optional_cleanup(PyObject* UNUSED(module), PyObject* flag) -{ - int is_true = PyObject_IsTrue(flag); - if (is_true == -1) { - return nullptr; - } - - std::clock_t& clocks = ThreadState::clocks_used_doing_gc(); - if (is_true) { - // If we already have a value, we don't want to lose it. - if (clocks == std::clock_t(-1)) { - clocks = 0; - } - } - else { - clocks = std::clock_t(-1); - } - Py_RETURN_NONE; -} - - - - -#if !GREENLET_PY313 -PyDoc_STRVAR(mod_get_tstate_trash_delete_nesting_doc, - "get_tstate_trash_delete_nesting() -> Integer\n" - "\n" - "Return the 'trash can' nesting level.
Testing only.\n"); -static PyObject* -mod_get_tstate_trash_delete_nesting(PyObject* UNUSED(module)) -{ - PyThreadState* tstate = PyThreadState_GET(); - -#if GREENLET_PY312 - return PyLong_FromLong(tstate->trash.delete_nesting); -#else - return PyLong_FromLong(tstate->trash_delete_nesting); -#endif -} -#endif - - - - -static PyMethodDef GreenMethods[] = { - { - .ml_name="getcurrent", - .ml_meth=(PyCFunction)mod_getcurrent, - .ml_flags=METH_NOARGS, - .ml_doc=mod_getcurrent_doc - }, - { - .ml_name="settrace", - .ml_meth=(PyCFunction)mod_settrace, - .ml_flags=METH_VARARGS, - .ml_doc=mod_settrace_doc - }, - { - .ml_name="gettrace", - .ml_meth=(PyCFunction)mod_gettrace, - .ml_flags=METH_NOARGS, - .ml_doc=mod_gettrace_doc - }, - { - .ml_name="set_thread_local", - .ml_meth=(PyCFunction)mod_set_thread_local, - .ml_flags=METH_VARARGS, - .ml_doc=mod_set_thread_local_doc - }, - { - .ml_name="get_pending_cleanup_count", - .ml_meth=(PyCFunction)mod_get_pending_cleanup_count, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_pending_cleanup_count_doc - }, - { - .ml_name="get_total_main_greenlets", - .ml_meth=(PyCFunction)mod_get_total_main_greenlets, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_total_main_greenlets_doc - }, - { - .ml_name="get_clocks_used_doing_optional_cleanup", - .ml_meth=(PyCFunction)mod_get_clocks_used_doing_optional_cleanup, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_clocks_used_doing_optional_cleanup_doc - }, - { - .ml_name="enable_optional_cleanup", - .ml_meth=(PyCFunction)mod_enable_optional_cleanup, - .ml_flags=METH_O, - .ml_doc=mod_enable_optional_cleanup_doc - }, -#if !GREENLET_PY313 - { - .ml_name="get_tstate_trash_delete_nesting", - .ml_meth=(PyCFunction)mod_get_tstate_trash_delete_nesting, - .ml_flags=METH_NOARGS, - .ml_doc=mod_get_tstate_trash_delete_nesting_doc - }, -#endif - {.ml_name=NULL, .ml_meth=NULL} /* Sentinel */ -}; - -static const char* const copy_on_greentype[] = { - "getcurrent", - "error", - "GreenletExit", - "settrace", - "gettrace", - NULL -}; - -static struct PyModuleDef greenlet_module_def = { - .m_base=PyModuleDef_HEAD_INIT, - .m_name="greenlet._greenlet", - .m_doc=NULL, - .m_size=-1, - .m_methods=GreenMethods, -}; - - -#endif - -#ifdef __clang__ -# pragma clang diagnostic pop -#elif defined(__GNUC__) -# pragma GCC diagnostic pop -#endif diff --git a/myenv/Lib/site-packages/greenlet/TBrokenGreenlet.cpp b/myenv/Lib/site-packages/greenlet/TBrokenGreenlet.cpp deleted file mode 100644 index 7e9ab5b..0000000 --- a/myenv/Lib/site-packages/greenlet/TBrokenGreenlet.cpp +++ /dev/null @@ -1,45 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::BrokenGreenlet.
- * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ - -#include "TGreenlet.hpp" - -namespace greenlet { - -void* BrokenGreenlet::operator new(size_t UNUSED(count)) -{ - return allocator.allocate(1); -} - - -void BrokenGreenlet::operator delete(void* ptr) -{ - return allocator.deallocate(static_cast<BrokenGreenlet*>(ptr), - 1); -} - -greenlet::PythonAllocator<greenlet::BrokenGreenlet> greenlet::BrokenGreenlet::allocator; - -bool -BrokenGreenlet::force_slp_switch_error() const noexcept -{ - return this->_force_slp_switch_error; -} - -UserGreenlet::switchstack_result_t BrokenGreenlet::g_switchstack(void) -{ - if (this->_force_switch_error) { - return switchstack_result_t(-1); - } - return UserGreenlet::g_switchstack(); -} - -}; //namespace greenlet diff --git a/myenv/Lib/site-packages/greenlet/TExceptionState.cpp b/myenv/Lib/site-packages/greenlet/TExceptionState.cpp deleted file mode 100644 index 08a94ae..0000000 --- a/myenv/Lib/site-packages/greenlet/TExceptionState.cpp +++ /dev/null @@ -1,62 +0,0 @@ -#ifndef GREENLET_EXCEPTION_STATE_CPP -#define GREENLET_EXCEPTION_STATE_CPP - -#include <Python.h> -#include "TGreenlet.hpp" - -namespace greenlet { - - -ExceptionState::ExceptionState() -{ - this->clear(); -} - -void ExceptionState::operator<<(const PyThreadState *const tstate) noexcept -{ - this->exc_info = tstate->exc_info; - this->exc_state = tstate->exc_state; -} - -void ExceptionState::operator>>(PyThreadState *const tstate) noexcept -{ - tstate->exc_state = this->exc_state; - tstate->exc_info = - this->exc_info ? this->exc_info : &tstate->exc_state; - this->clear(); -} - -void ExceptionState::clear() noexcept -{ - this->exc_info = nullptr; - this->exc_state.exc_value = nullptr; -#if !GREENLET_PY311 - this->exc_state.exc_type = nullptr; - this->exc_state.exc_traceback = nullptr; -#endif - this->exc_state.previous_item = nullptr; -} - -int ExceptionState::tp_traverse(visitproc visit, void* arg) noexcept -{ - Py_VISIT(this->exc_state.exc_value); -#if !GREENLET_PY311 - Py_VISIT(this->exc_state.exc_type); - Py_VISIT(this->exc_state.exc_traceback); -#endif - return 0; -} - -void ExceptionState::tp_clear() noexcept -{ - Py_CLEAR(this->exc_state.exc_value); -#if !GREENLET_PY311 - Py_CLEAR(this->exc_state.exc_type); - Py_CLEAR(this->exc_state.exc_traceback); -#endif -} - - -}; // namespace greenlet - -#endif // GREENLET_EXCEPTION_STATE_CPP diff --git a/myenv/Lib/site-packages/greenlet/TGreenlet.cpp b/myenv/Lib/site-packages/greenlet/TGreenlet.cpp deleted file mode 100644 index 4698a17..0000000 --- a/myenv/Lib/site-packages/greenlet/TGreenlet.cpp +++ /dev/null @@ -1,718 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::Greenlet. - * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef TGREENLET_CPP -#define TGREENLET_CPP -#include "greenlet_internal.hpp" -#include "TGreenlet.hpp" - - -#include "TGreenletGlobals.cpp" -#include "TThreadStateDestroy.cpp" - -namespace greenlet { - -Greenlet::Greenlet(PyGreenlet* p) - : Greenlet(p, StackState()) -{ -} - -Greenlet::Greenlet(PyGreenlet* p, const StackState& initial_stack) - : _self(p), stack_state(initial_stack) -{ - assert(p->pimpl == nullptr); - p->pimpl = this; -} - -Greenlet::~Greenlet() -{ - // XXX: Can't do this.
tp_clear is a virtual function, and by the - // time we're here, we've sliced off our child classes. - //this->tp_clear(); - this->_self->pimpl = nullptr; -} - -bool -Greenlet::force_slp_switch_error() const noexcept -{ - return false; -} - -void -Greenlet::release_args() -{ - this->switch_args.CLEAR(); -} - -/** - * CAUTION: This will allocate memory and may trigger garbage - * collection and arbitrary Python code. - */ -OwnedObject -Greenlet::throw_GreenletExit_during_dealloc(const ThreadState& UNUSED(current_thread_state)) -{ - // If we're killed because we lost all references in the - // middle of a switch, that's ok. Don't reset the args/kwargs, - // we still want to pass them to the parent. - PyErr_SetString(mod_globs->PyExc_GreenletExit, - "Killing the greenlet because all references have vanished."); - // To get here it had to have run before - return this->g_switch(); -} - -inline void -Greenlet::slp_restore_state() noexcept -{ -#ifdef SLP_BEFORE_RESTORE_STATE - SLP_BEFORE_RESTORE_STATE(); -#endif - this->stack_state.copy_heap_to_stack( - this->thread_state()->borrow_current()->stack_state); -} - - -inline int -Greenlet::slp_save_state(char *const stackref) noexcept -{ - // XXX: This used to happen in the middle, before saving, but - // after finding the next owner. Does that matter? This is - // only defined for Sparc/GCC where it flushes register - // windows to the stack (I think) -#ifdef SLP_BEFORE_SAVE_STATE - SLP_BEFORE_SAVE_STATE(); -#endif - return this->stack_state.copy_stack_to_heap(stackref, - this->thread_state()->borrow_current()->stack_state); -} - -/** - * CAUTION: This will allocate memory and may trigger garbage - * collection and arbitrary Python code. - */ -OwnedObject -Greenlet::on_switchstack_or_initialstub_failure( - Greenlet* target, - const Greenlet::switchstack_result_t& err, - const bool target_was_me, - const bool was_initial_stub) -{ - // If we get here, either g_initialstub() - // failed, or g_switchstack() failed. Either one of those - // cases SHOULD leave us in the original greenlet with a valid stack. - if (!PyErr_Occurred()) { - PyErr_SetString( - PyExc_SystemError, - was_initial_stub - ? "Failed to switch stacks into a greenlet for the first time." - : "Failed to switch stacks into a running greenlet."); - } - this->release_args(); - - if (target && !target_was_me) { - target->murder_in_place(); - } - - assert(!err.the_new_current_greenlet); - assert(!err.origin_greenlet); - return OwnedObject(); - -} - -OwnedGreenlet -Greenlet::g_switchstack_success() noexcept -{ - PyThreadState* tstate = PyThreadState_GET(); - // restore the saved state - this->python_state >> tstate; - this->exception_state >> tstate; - - // The thread state hasn't been changed yet. - ThreadState* thread_state = this->thread_state(); - OwnedGreenlet result(thread_state->get_current()); - thread_state->set_current(this->self()); - //assert(thread_state->borrow_current().borrow() == this->_self); - return result; -} - -Greenlet::switchstack_result_t -Greenlet::g_switchstack(void) -{ - // if any of these assertions fail, it's likely because we - // switched away and tried to switch back to us. Early stages of - // switching are not reentrant because we re-use ``this->args()``. 
- // Switching away would happen if we trigger a garbage collection - // (by just using some Python APIs that happen to allocate Python - // objects) and some garbage had weakref callbacks or __del__ that - // switches (people don't write code like that by hand, but with - // gevent it's possible without realizing it) - assert(this->args() || PyErr_Occurred()); - { /* save state */ - if (this->thread_state()->is_current(this->self())) { - // Hmm, nothing to do. - // TODO: Does this bypass trace events that are - // important? - return switchstack_result_t(0, - this, this->thread_state()->borrow_current()); - } - BorrowedGreenlet current = this->thread_state()->borrow_current(); - PyThreadState* tstate = PyThreadState_GET(); - - current->python_state << tstate; - current->exception_state << tstate; - this->python_state.will_switch_from(tstate); - switching_thread_state = this; - current->expose_frames(); - } - assert(this->args() || PyErr_Occurred()); - // If this is the first switch into a greenlet, this will - // return twice, once with 1 in the new greenlet, once with 0 - // in the origin. - int err; - if (this->force_slp_switch_error()) { - err = -1; - } - else { - err = slp_switch(); - } - - if (err < 0) { /* error */ - // Tested by - // test_greenlet.TestBrokenGreenlets.test_failed_to_slp_switch_into_running - // - // It's not clear if it's worth trying to clean up and - // continue here. Failing to switch stacks is a big deal which - // may not be recoverable (who knows what state the stack is in). - // Also, we've stolen references in preparation for calling - // ``g_switchstack_success()`` and we don't have a clean - // mechanism for backing that all out. - Py_FatalError("greenlet: Failed low-level slp_switch(). The stack is probably corrupt."); - } - - // No stack-based variables are valid anymore. - - // But the global is volatile so we can reload it without the - // compiler caching it from earlier. - Greenlet* greenlet_that_switched_in = switching_thread_state; // aka this - switching_thread_state = nullptr; - // except that no stack variables are valid, we would: - // assert(this == greenlet_that_switched_in); - - // switchstack success is where we restore the exception state, - // etc. It returns the origin greenlet because it's convenient. - - OwnedGreenlet origin = greenlet_that_switched_in->g_switchstack_success(); - assert(greenlet_that_switched_in->args() || PyErr_Occurred()); - return switchstack_result_t(err, greenlet_that_switched_in, origin); -} - - -inline void -Greenlet::check_switch_allowed() const -{ - // TODO: Make this take a parameter of the current greenlet, - // or current main greenlet, to make the check for - // cross-thread switching cheaper. Surely somewhere up the - // call stack we've already accessed the thread local variable. - - // We expect to always have a main greenlet now; accessing the thread state - // created it. However, if we get here and cleanup has already - // begun because we're a greenlet that was running in a - // (now dead) thread, these invariants will not hold true. In - // fact, accessing `this->thread_state` may not even be possible. - - // If the thread this greenlet was running in is dead, - // we'll still have a reference to a main greenlet, but the - // thread state pointer we have is bogus. - // TODO: Give the objects an API to determine if they belong - // to a dead thread.
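// A sketch of what that TODO could look like, using only APIs declared
// in TGreenlet.hpp; hypothetical, greenlet does not define this helper.
// It packages the dead-thread test performed by hand just below:
//
//     bool Greenlet::lineage_thread_is_dead() const noexcept
//     {
//         // Dead thread: the .parent lineage still reaches a main
//         // greenlet, but that main greenlet no longer records a
//         // live ThreadState.
//         const BorrowedMainGreenlet main = this->find_main_greenlet_in_lineage();
//         return main && !main->thread_state();
//     }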
- - const BorrowedMainGreenlet main_greenlet = this->find_main_greenlet_in_lineage(); - - if (!main_greenlet) { - throw PyErrOccurred(mod_globs->PyExc_GreenletError, - "cannot switch to a garbage collected greenlet"); - } - - if (!main_greenlet->thread_state()) { - throw PyErrOccurred(mod_globs->PyExc_GreenletError, - "cannot switch to a different thread (which happens to have exited)"); - } - - // The main greenlet we found was from the .parent lineage. - // That may or may not have any relationship to the main - // greenlet of the running thread. We can't actually access - // our this->thread_state members to try to check that, - // because it could be in the process of getting destroyed, - // but setting the main_greenlet->thread_state member to NULL - // may not be visible yet. So we need to check against the - // current thread state (once the cheaper checks are out of - // the way) - const BorrowedMainGreenlet current_main_greenlet = GET_THREAD_STATE().state().borrow_main_greenlet(); - if ( - // lineage main greenlet is not this thread's greenlet - current_main_greenlet != main_greenlet - || ( - // attached to some thread - this->main_greenlet() - // XXX: Same condition as above. Was this supposed to be - // this->main_greenlet()? - && current_main_greenlet != main_greenlet) - // switching into a known dead thread (XXX: which, if we get here, - // is bad, because we just accessed the thread state, which is - // gone!) - || (!current_main_greenlet->thread_state())) { - // CAUTION: This may trigger memory allocations, gc, and - // arbitrary Python code. - throw PyErrOccurred( - mod_globs->PyExc_GreenletError, - "Cannot switch to a different thread\n\tCurrent: %R\n\tExpected: %R", - current_main_greenlet, main_greenlet); - } -} - -const OwnedObject -Greenlet::context() const -{ - using greenlet::PythonStateContext; - OwnedObject result; - - if (this->is_currently_running_in_some_thread()) { - /* Currently running greenlet: context is stored in the thread state, - not the greenlet object. */ - if (GET_THREAD_STATE().state().is_current(this->self())) { - result = PythonStateContext::context(PyThreadState_GET()); - } - else { - throw ValueError( - "cannot get context of a " - "greenlet that is running in a different thread"); - } - } - else { - /* Greenlet is not running: just return context. */ - result = this->python_state.context(); - } - if (!result) { - result = OwnedObject::None(); - } - return result; -} - - -void -Greenlet::context(BorrowedObject given) -{ - using greenlet::PythonStateContext; - if (!given) { - throw AttributeError("can't delete context attribute"); - } - if (given.is_None()) { - /* "Empty context" is stored as NULL, not None. */ - given = nullptr; - } - - //checks type, incrs refcnt - greenlet::refs::OwnedContext context(given); - PyThreadState* tstate = PyThreadState_GET(); - - if (this->is_currently_running_in_some_thread()) { - if (!GET_THREAD_STATE().state().is_current(this->self())) { - throw ValueError("cannot set context of a greenlet" - " that is running in a different thread"); - } - - /* Currently running greenlet: context is stored in the thread state, - not the greenlet object. */ - OwnedObject octx = OwnedObject::consuming(PythonStateContext::context(tstate)); - PythonStateContext::context(tstate, context.relinquish_ownership()); - } - else { - /* Greenlet is not running: just set context. Note that the - greenlet may be dead.*/ - this->python_state.context() = context; - } -} - -/** - * CAUTION: May invoke arbitrary Python code.
- * - * Figure out what the result of ``greenlet.switch(arg, kwargs)`` - * should be and transfers ownership of it to the left-hand-side. - * - * If switch() was just passed an arg tuple, then we'll just return that. - * If only keyword arguments were passed, then we'll pass the keyword - * argument dict. Otherwise, we'll create a tuple of (args, kwargs) and - * return both. - * - * CAUTION: This may allocate a new tuple object, which may - * cause the Python garbage collector to run, which in turn may - * run arbitrary Python code that switches. - */ -OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept -{ - // Because this may invoke arbitrary Python code, which could - // result in switching back to us, we need to get the - // arguments locally on the stack. - assert(rhs); - OwnedObject args = rhs.args(); - OwnedObject kwargs = rhs.kwargs(); - rhs.CLEAR(); - // We shouldn't be called twice for the same switch. - assert(args || kwargs); - assert(!rhs); - - if (!kwargs) { - lhs = args; - } - else if (!PyDict_Size(kwargs.borrow())) { - lhs = args; - } - else if (!PySequence_Length(args.borrow())) { - lhs = kwargs; - } - else { - // PyTuple_Pack allocates memory, may GC, may run arbitrary - // Python code. - lhs = OwnedObject::consuming(PyTuple_Pack(2, args.borrow(), kwargs.borrow())); - } - return lhs; -} - -static OwnedObject -g_handle_exit(const OwnedObject& greenlet_result) -{ - if (!greenlet_result && mod_globs->PyExc_GreenletExit.PyExceptionMatches()) { - /* catch and ignore GreenletExit */ - PyErrFetchParam val; - PyErr_Fetch(PyErrFetchParam(), val, PyErrFetchParam()); - if (!val) { - return OwnedObject::None(); - } - return OwnedObject(val); - } - - if (greenlet_result) { - // package the result into a 1-tuple - // PyTuple_Pack increments the reference of its arguments, - // so we always need to decref the greenlet result; - // the owner will do that. - return OwnedObject::consuming(PyTuple_Pack(1, greenlet_result.borrow())); - } - - return OwnedObject(); -} - - - -/** - * May run arbitrary Python code. - */ -OwnedObject -Greenlet::g_switch_finish(const switchstack_result_t& err) -{ - assert(err.the_new_current_greenlet == this); - - ThreadState& state = *this->thread_state(); - // Because calling the trace function could do arbitrary things, - // including switching away from this greenlet and then maybe - // switching back, we need to capture the arguments now so that - // they don't change. - OwnedObject result; - if (this->args()) { - result <<= this->args(); - } - else { - assert(PyErr_Occurred()); - } - assert(!this->args()); - try { - // Our only caller handles the bad error case - assert(err.status >= 0); - assert(state.borrow_current() == this->self()); - if (OwnedObject tracefunc = state.get_tracefunc()) { - assert(result || PyErr_Occurred()); - g_calltrace(tracefunc, - result ? mod_globs->event_switch : mod_globs->event_throw, - err.origin_greenlet, - this->self()); - } - // The above could have invoked arbitrary Python code, but - // it couldn't switch back to this object and *also* - // throw an exception, so the args won't have changed. - - if (PyErr_Occurred()) { - // We get here if we fell off the end of the run() function - // raising an exception. The switch itself was - // successful, but the function raised. - // valgrind reports that memory allocated here can still - // be reached after a test run.
- throw PyErrOccurred::from_current(); - } - return result; - } - catch (const PyErrOccurred&) { - /* Turn switch errors into switch throws */ - /* Turn trace errors into switch throws */ - this->release_args(); - throw; - } -} - -void -Greenlet::g_calltrace(const OwnedObject& tracefunc, - const greenlet::refs::ImmortalEventName& event, - const BorrowedGreenlet& origin, - const BorrowedGreenlet& target) -{ - PyErrPieces saved_exc; - try { - TracingGuard tracing_guard; - // TODO: We have saved the active exception (if any) that's - // about to be raised. In the 'throw' case, we could provide - // the exception to the tracefunction, which seems very helpful. - tracing_guard.CallTraceFunction(tracefunc, event, origin, target); - } - catch (const PyErrOccurred&) { - // In case of exceptions trace function is removed, - // and any existing exception is replaced with the tracing - // exception. - GET_THREAD_STATE().state().set_tracefunc(Py_None); - throw; - } - - saved_exc.PyErrRestore(); - assert( - (event == mod_globs->event_throw && PyErr_Occurred()) - || (event == mod_globs->event_switch && !PyErr_Occurred()) - ); -} - -void -Greenlet::murder_in_place() -{ - if (this->active()) { - assert(!this->is_currently_running_in_some_thread()); - this->deactivate_and_free(); - } -} - -inline void -Greenlet::deactivate_and_free() -{ - if (!this->active()) { - return; - } - // Throw away any saved stack. - this->stack_state = StackState(); - assert(!this->stack_state.active()); - // Throw away any Python references. - // We're holding a borrowed reference to the last - // frame we executed. Since we borrowed it, the - // normal traversal, clear, and dealloc functions - // ignore it, meaning it leaks. (The thread state - // object can't find it to clear it when that's - // deallocated either, because by definition if we - // got an object on this list, it wasn't - // running and the thread state doesn't have - // this frame.) - // So here, we *do* clear it. - this->python_state.tp_clear(true); -} - -bool -Greenlet::belongs_to_thread(const ThreadState* thread_state) const -{ - if (!this->thread_state() // not running anywhere, or thread - // exited - || !thread_state) { // same, or there is no thread state. - return false; - } - return true; -} - - -void -Greenlet::deallocing_greenlet_in_thread(const ThreadState* current_thread_state) -{ - /* Cannot raise an exception to kill the greenlet if - it is not running in the same thread! */ - if (this->belongs_to_thread(current_thread_state)) { - assert(current_thread_state); - // To get here it had to have run before - /* Send the greenlet a GreenletExit exception. */ - - // We don't care about the return value, only whether an - // exception happened. - this->throw_GreenletExit_during_dealloc(*current_thread_state); - return; - } - - // Not the same thread! Temporarily save the greenlet - // into its thread's deleteme list, *if* it exists. - // If that thread has already exited, and processed its pending - // cleanup, we'll never be able to clean everything up: we won't - // be able to raise an exception. - // That's mostly OK! Since we can't add it to a list, our refcount - // won't increase, and we'll go ahead with the DECREFs later. - ThreadState *const thread_state = this->thread_state(); - if (thread_state) { - thread_state->delete_when_thread_running(this->self()); - } - else { - // The thread is dead, we can't raise an exception. - // We need to make it look non-active, though, so that dealloc - // finishes killing it. 
- this->deactivate_and_free(); - } - return; -} - - -int -Greenlet::tp_traverse(visitproc visit, void* arg) -{ - - int result; - if ((result = this->exception_state.tp_traverse(visit, arg)) != 0) { - return result; - } - //XXX: This is ugly. But so is handling everything having to do - //with the top frame. - bool visit_top_frame = this->was_running_in_dead_thread(); - // When true, the thread is dead. Our implicit weak reference to the - // frame is now all that's left; we consider ourselves to - // strongly own it now. - if ((result = this->python_state.tp_traverse(visit, arg, visit_top_frame)) != 0) { - return result; - } - return 0; -} - -int -Greenlet::tp_clear() -{ - bool own_top_frame = this->was_running_in_dead_thread(); - this->exception_state.tp_clear(); - this->python_state.tp_clear(own_top_frame); - return 0; -} - -bool Greenlet::is_currently_running_in_some_thread() const -{ - return this->stack_state.active() && !this->python_state.top_frame(); -} - -#if GREENLET_PY312 -void GREENLET_NOINLINE(Greenlet::expose_frames)() -{ - if (!this->python_state.top_frame()) { - return; - } - - _PyInterpreterFrame* last_complete_iframe = nullptr; - _PyInterpreterFrame* iframe = this->python_state.top_frame()->f_frame; - while (iframe) { - // We must make a copy before looking at the iframe contents, - // since iframe might point to a portion of the greenlet's C stack - // that was spilled when switching greenlets. - _PyInterpreterFrame iframe_copy; - this->stack_state.copy_from_stack(&iframe_copy, iframe, sizeof(*iframe)); - if (!_PyFrame_IsIncomplete(&iframe_copy)) { - // If the iframe were OWNED_BY_CSTACK then it would always be - // incomplete. Since it's not incomplete, it's not on the C stack - // and we can access it through the original `iframe` pointer - // directly. This is important since GetFrameObject might - // lazily _create_ the frame object and we don't want the - // interpreter to lose track of it. - assert(iframe_copy.owner != FRAME_OWNED_BY_CSTACK); - - // We really want to just write: - // PyFrameObject* frame = _PyFrame_GetFrameObject(iframe); - // but _PyFrame_GetFrameObject calls _PyFrame_MakeAndSetFrameObject - // which is not a visible symbol in libpython. The easiest - // way to get a public function to call it is using - // PyFrame_GetBack, which is defined as follows: - // assert(frame != NULL); - // assert(!_PyFrame_IsIncomplete(frame->f_frame)); - // PyFrameObject *back = frame->f_back; - // if (back == NULL) { - // _PyInterpreterFrame *prev = frame->f_frame->previous; - // prev = _PyFrame_GetFirstComplete(prev); - // if (prev) { - // back = _PyFrame_GetFrameObject(prev); - // } - // } - // return (PyFrameObject*)Py_XNewRef(back); - if (!iframe->frame_obj) { - PyFrameObject dummy_frame; - _PyInterpreterFrame dummy_iframe; - dummy_frame.f_back = nullptr; - dummy_frame.f_frame = &dummy_iframe; - // force the iframe to be considered complete without - // needing to check its code object: - dummy_iframe.owner = FRAME_OWNED_BY_GENERATOR; - dummy_iframe.previous = iframe; - assert(!_PyFrame_IsIncomplete(&dummy_iframe)); - // Drop the returned reference immediately; the iframe - // continues to hold a strong reference - Py_XDECREF(PyFrame_GetBack(&dummy_frame)); - assert(iframe->frame_obj); - } - - // This is a complete frame, so make the last one of those we saw - // point at it, bypassing any incomplete frames (which may have - // been on the C stack) in between the two. 
We're overwriting - // last_complete_iframe->previous and need that to be reversible, - // so we store the original previous ptr in the frame object - // (which we must have created on a previous iteration through - // this loop). The frame object has a bunch of storage that is - // only used when its iframe is OWNED_BY_FRAME_OBJECT, which only - // occurs when the frame object outlives the frame's execution, - // which can't have happened yet because the frame is currently - // executing as far as the interpreter is concerned. So, we can - // reuse it for our own purposes. - assert(iframe->owner == FRAME_OWNED_BY_THREAD - || iframe->owner == FRAME_OWNED_BY_GENERATOR); - if (last_complete_iframe) { - assert(last_complete_iframe->frame_obj); - memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], - &last_complete_iframe->previous, sizeof(void *)); - last_complete_iframe->previous = iframe; - } - last_complete_iframe = iframe; - } - // Frames that are OWNED_BY_FRAME_OBJECT are linked via the - // frame's f_back while all others are linked via the iframe's - // previous ptr. Since all the frames we traverse are running - // as far as the interpreter is concerned, we don't have to - // worry about the OWNED_BY_FRAME_OBJECT case. - iframe = iframe_copy.previous; - } - - // Give the outermost complete iframe a null previous pointer to - // account for any potential incomplete/C-stack iframes between it - // and the actual top-of-stack - if (last_complete_iframe) { - assert(last_complete_iframe->frame_obj); - memcpy(&last_complete_iframe->frame_obj->_f_frame_data[0], - &last_complete_iframe->previous, sizeof(void *)); - last_complete_iframe->previous = nullptr; - } -} -#else -void Greenlet::expose_frames() -{ - -} -#endif - -}; // namespace greenlet -#endif diff --git a/myenv/Lib/site-packages/greenlet/TGreenlet.hpp b/myenv/Lib/site-packages/greenlet/TGreenlet.hpp deleted file mode 100644 index 512f7fb..0000000 --- a/myenv/Lib/site-packages/greenlet/TGreenlet.hpp +++ /dev/null @@ -1,813 +0,0 @@ -#ifndef GREENLET_GREENLET_HPP -#define GREENLET_GREENLET_HPP -/* - * Declarations of the core data structures. -*/ - -#define PY_SSIZE_T_CLEAN -#include <Python.h> - -#include "greenlet_compiler_compat.hpp" -#include "greenlet_refs.hpp" -#include "greenlet_cpython_compat.hpp" -#include "greenlet_allocator.hpp" - -using greenlet::refs::OwnedObject; -using greenlet::refs::OwnedGreenlet; -using greenlet::refs::OwnedMainGreenlet; -using greenlet::refs::BorrowedGreenlet; - -#if PY_VERSION_HEX < 0x30B00A6 -# define _PyCFrame CFrame -# define _PyInterpreterFrame _interpreter_frame -#endif - -#if GREENLET_PY312 -# define Py_BUILD_CORE -# include "internal/pycore_frame.h" -#endif - -// XXX: TODO: Work to remove all virtual functions -// for speed of calling and size of objects (no vtable). -// One pattern is the Curiously Recurring Template -namespace greenlet -{ - class ExceptionState - { - private: - G_NO_COPIES_OF_CLS(ExceptionState); - - // Even though these are borrowed objects, we actually own - // them, when they're not null. - // XXX: Express that in the API.
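// Reading aid, not greenlet code: the operator<< / operator>> pair
// declared below is defined in TExceptionState.cpp earlier in this
// diff. '<<' captures the thread's exception stack into this object;
// '>>' restores it and clears the copy. Roughly:
//
//     ExceptionState saved;
//     saved << tstate;   // stash tstate->exc_info / tstate->exc_state
//     /* ... the stacks are switched ... */
//     saved >> tstate;   // restore; 'saved' is cleared again
//
// where 'tstate' is a PyThreadState*, e.g. from PyThreadState_GET().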
- private: - _PyErr_StackItem* exc_info; - _PyErr_StackItem exc_state; - public: - ExceptionState(); - void operator<<(const PyThreadState *const tstate) noexcept; - void operator>>(PyThreadState* tstate) noexcept; - void clear() noexcept; - - int tp_traverse(visitproc visit, void* arg) noexcept; - void tp_clear() noexcept; - }; - - template<typename T> - void operator<<(const PyThreadState *const tstate, T& exc); - - class PythonStateContext - { - protected: - greenlet::refs::OwnedContext _context; - public: - inline const greenlet::refs::OwnedContext& context() const - { - return this->_context; - } - inline greenlet::refs::OwnedContext& context() - { - return this->_context; - } - - inline void tp_clear() - { - this->_context.CLEAR(); - } - - template<typename T> - inline static PyObject* context(T* tstate) - { - return tstate->context; - } - - template<typename T> - inline static void context(T* tstate, PyObject* new_context) - { - tstate->context = new_context; - tstate->context_ver++; - } - }; - class SwitchingArgs; - class PythonState : public PythonStateContext - { - public: - typedef greenlet::refs::OwnedReference<PyFrameObject> OwnedFrame; - private: - G_NO_COPIES_OF_CLS(PythonState); - // We own this if we're suspended (although currently we don't - // tp_traverse into it; that's a TODO). If we're running, it's - // empty. If we get deallocated and *still* have a frame, it - // won't be reachable from the place that normally decref's - // it, so we need to do it (hence owning it). - OwnedFrame _top_frame; -#if GREENLET_USE_CFRAME - _PyCFrame* cframe; - int use_tracing; -#endif -#if GREENLET_PY312 - int py_recursion_depth; - int c_recursion_depth; -#else - int recursion_depth; -#endif -#if GREENLET_PY313 - PyObject *delete_later; -#else - int trash_delete_nesting; -#endif -#if GREENLET_PY311 - _PyInterpreterFrame* current_frame; - _PyStackChunk* datastack_chunk; - PyObject** datastack_top; - PyObject** datastack_limit; -#endif - // The PyInterpreterFrame list on 3.12+ contains some entries that are - // on the C stack, which can't be directly accessed while a greenlet is - // suspended. In order to keep greenlet gr_frame introspection working, - // we adjust stack switching to rewrite the interpreter frame list - // to skip these C-stack frames; we call this "exposing" the greenlet's - // frames because it makes them valid to work with in Python. Then when - // the greenlet is resumed we need to remember to reverse the operation - // we did. The C-stack frames are "entry frames" which are a low-level - // interpreter detail; they're not needed for introspection, but do - // need to be present for the eval loop to work. - void unexpose_frames(); - - public: - - PythonState(); - // You can use this for testing whether we have a frame - // or not. It returns const so they can't modify it.
- const OwnedFrame& top_frame() const noexcept; - - inline void operator<<(const PyThreadState *const tstate) noexcept; - inline void operator>>(PyThreadState* tstate) noexcept; - void clear() noexcept; - - int tp_traverse(visitproc visit, void* arg, bool visit_top_frame) noexcept; - void tp_clear(bool own_top_frame) noexcept; - void set_initial_state(const PyThreadState* const tstate) noexcept; -#if GREENLET_USE_CFRAME - void set_new_cframe(_PyCFrame& frame) noexcept; -#endif - - void may_switch_away() noexcept; - inline void will_switch_from(PyThreadState *const origin_tstate) noexcept; - void did_finish(PyThreadState* tstate) noexcept; - }; - - class StackState - { - // By having only plain C (POD) members, no virtual functions - // or bases, we get a trivial assignment operator generated - // for us. However, that's not safe since we do manage memory. - // So we declare an assignment operator that only works if we - // don't have any memory allocated. (We don't use - // std::shared_ptr for reference counting just to keep this - // object small) - private: - char* _stack_start; - char* stack_stop; - char* stack_copy; - intptr_t _stack_saved; - StackState* stack_prev; - inline int copy_stack_to_heap_up_to(const char* const stop) noexcept; - inline void free_stack_copy() noexcept; - - public: - /** - * Creates a started, but inactive, state, using *current* - * as the previous. - */ - StackState(void* mark, StackState& current); - /** - * Creates an inactive, unstarted, state. - */ - StackState(); - ~StackState(); - StackState(const StackState& other); - StackState& operator=(const StackState& other); - inline void copy_heap_to_stack(const StackState& current) noexcept; - inline int copy_stack_to_heap(char* const stackref, const StackState& current) noexcept; - inline bool started() const noexcept; - inline bool main() const noexcept; - inline bool active() const noexcept; - inline void set_active() noexcept; - inline void set_inactive() noexcept; - inline intptr_t stack_saved() const noexcept; - inline char* stack_start() const noexcept; - static inline StackState make_main() noexcept; -#ifdef GREENLET_USE_STDIO - friend std::ostream& operator<<(std::ostream& os, const StackState& s); -#endif - - // Fill in [dest, dest + n) with the values that would be at - // [src, src + n) while this greenlet is running. This is like memcpy - // except that if the greenlet is suspended it accounts for the portion - // of the greenlet's stack that was spilled to the heap. `src` may - // be on this greenlet's stack, or on the heap, but not on a different - // greenlet's stack. - void copy_from_stack(void* dest, const void* src, size_t n) const; - }; -#ifdef GREENLET_USE_STDIO - std::ostream& operator<<(std::ostream& os, const StackState& s); -#endif - - class SwitchingArgs - { - private: - G_NO_ASSIGNMENT_OF_CLS(SwitchingArgs); - // If args and kwargs are both false (NULL), this is a *throw*, not a - // switch. PyErr_... must have been called already. - OwnedObject _args; - OwnedObject _kwargs; - public: - - SwitchingArgs() - {} - - SwitchingArgs(const OwnedObject& args, const OwnedObject& kwargs) - : _args(args), - _kwargs(kwargs) - {} - - SwitchingArgs(const SwitchingArgs& other) - : _args(other._args), - _kwargs(other._kwargs) - {} - - const OwnedObject& args() - { - return this->_args; - } - - const OwnedObject& kwargs() - { - return this->_kwargs; - } - - /** - * Moves ownership from the argument to this object. 
- */ - SwitchingArgs& operator<<=(SwitchingArgs& other) - { - if (this != &other) { - this->_args = other._args; - this->_kwargs = other._kwargs; - other.CLEAR(); - } - return *this; - } - - /** - * Acquires ownership of the argument (consumes the reference). - */ - SwitchingArgs& operator<<=(PyObject* args) - { - this->_args = OwnedObject::consuming(args); - this->_kwargs.CLEAR(); - return *this; - } - - /** - * Acquires ownership of the argument. - * - * Sets the args to be the given value; clears the kwargs. - */ - SwitchingArgs& operator<<=(OwnedObject& args) - { - assert(&args != &this->_args); - this->_args = args; - this->_kwargs.CLEAR(); - args.CLEAR(); - - return *this; - } - - explicit operator bool() const noexcept - { - return this->_args || this->_kwargs; - } - - inline void CLEAR() - { - this->_args.CLEAR(); - this->_kwargs.CLEAR(); - } - - const std::string as_str() const noexcept - { - return PyUnicode_AsUTF8( - OwnedObject::consuming( - PyUnicode_FromFormat( - "SwitchingArgs(args=%R, kwargs=%R)", - this->_args.borrow(), - this->_kwargs.borrow() - ) - ).borrow() - ); - } - }; - - class ThreadState; - - class UserGreenlet; - class MainGreenlet; - - class Greenlet - { - private: - G_NO_COPIES_OF_CLS(Greenlet); - PyGreenlet* const _self; - private: - // XXX: Work to remove these. - friend class ThreadState; - friend class UserGreenlet; - friend class MainGreenlet; - protected: - ExceptionState exception_state; - SwitchingArgs switch_args; - StackState stack_state; - PythonState python_state; - Greenlet(PyGreenlet* p, const StackState& initial_state); - public: - // This constructor takes ownership of the PyGreenlet, by - // setting ``p->pimpl = this;``. - Greenlet(PyGreenlet* p); - virtual ~Greenlet(); - - const OwnedObject context() const; - - // You MUST call this _very_ early in the switching process to - // prepare anything that may need to be prepared. This might perform - // garbage collections or otherwise run arbitrary Python code. - // - // One specific use of it is for Python 3.11+, preventing - // running arbitrary code at unsafe times. See - // PythonState::may_switch_away(). - inline void may_switch_away() - { - this->python_state.may_switch_away(); - } - - inline void context(refs::BorrowedObject new_context); - - inline SwitchingArgs& args() - { - return this->switch_args; - } - - virtual const refs::BorrowedMainGreenlet main_greenlet() const = 0; - - inline intptr_t stack_saved() const noexcept - { - return this->stack_state.stack_saved(); - } - - // This is used by the macro SLP_SAVE_STATE to compute the - // difference in stack sizes. It might be nice to handle the - // computation ourself, but the type of the result - // varies by platform, so doing it in the macro is the - // simplest way. - inline const char* stack_start() const noexcept - { - return this->stack_state.stack_start(); - } - - virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); - virtual OwnedObject g_switch() = 0; - /** - * Force the greenlet to appear dead. Used when it's not - * possible to throw an exception into a greenlet anymore. - * - * This loses access to the thread state and the main greenlet. - */ - virtual void murder_in_place(); - - /** - * Called when somebody notices we were running in a dead - * thread to allow cleaning up resources (because we can't - * raise GreenletExit into it anymore). - * This is very similar to ``murder_in_place()``, except that - * it DOES NOT lose the main greenlet or thread state.
- */ - inline void deactivate_and_free(); - - - // Called when some thread wants to deallocate a greenlet - // object. - // The thread may or may not be the same thread the greenlet - // was running in. - // The thread state will be null if the thread the greenlet - // was running in was known to have exited. - void deallocing_greenlet_in_thread(const ThreadState* current_state); - - // Must be called on 3.12+ before exposing a suspended greenlet's - // frames to user code. This rewrites the linked list of interpreter - // frames to skip the ones that are being stored on the C stack (which - // can't be safely accessed while the greenlet is suspended because - // that stack space might be hosting a different greenlet), and - // sets PythonState::frames_were_exposed so we remember to restore - // the original list before resuming the greenlet. The C-stack frames - // are a low-level interpreter implementation detail; while they're - // important to the bytecode eval loop, they're superfluous for - // introspection purposes. - void expose_frames(); - - - // TODO: Figure out how to make these non-public. - inline void slp_restore_state() noexcept; - inline int slp_save_state(char *const stackref) noexcept; - - inline bool is_currently_running_in_some_thread() const; - virtual bool belongs_to_thread(const ThreadState* state) const; - - inline bool started() const - { - return this->stack_state.started(); - } - inline bool active() const - { - return this->stack_state.active(); - } - inline bool main() const - { - return this->stack_state.main(); - } - virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const = 0; - - virtual const OwnedGreenlet parent() const = 0; - virtual void parent(const refs::BorrowedObject new_parent) = 0; - - inline const PythonState::OwnedFrame& top_frame() - { - return this->python_state.top_frame(); - } - - virtual const OwnedObject& run() const = 0; - virtual void run(const refs::BorrowedObject nrun) = 0; - - - virtual int tp_traverse(visitproc visit, void* arg); - virtual int tp_clear(); - - - // Return the thread state that the greenlet is running in, or - // null if the greenlet is not running or the thread is known - // to have exited. - virtual ThreadState* thread_state() const noexcept = 0; - - // Return true if the greenlet is known to have been running - // (active) in a thread that has now exited. - virtual bool was_running_in_dead_thread() const noexcept = 0; - - // Return a borrowed greenlet that is the Python object - // this object represents. - inline BorrowedGreenlet self() const noexcept - { - return BorrowedGreenlet(this->_self); - } - - // For testing. If this returns true, we should pretend that - // slp_switch() failed. - virtual bool force_slp_switch_error() const noexcept; - - protected: - inline void release_args(); - - // The functions that must not be inlined are declared virtual. - // We also mark them as protected, not private, so that the - // compiler is forced to call them through a function pointer. - // (A sufficiently smart compiler could directly call a private - // virtual function since it can never be overridden in a - // subclass). - - // Also TODO: Switch away from integer error codes and to enums, - // or throw exceptions when possible. 
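// One shape the enum from that TODO could take; a sketch only, not
// part of greenlet. switchstack_result_t::status below currently
// encodes: negative = failure, 0 = returned on the origin side of a
// completed switch, 1 = first entry into the target greenlet (see
// Greenlet::g_switchstack in TGreenlet.cpp above).
//
//     enum class SwitchStatus : int {
//         Error = -1,
//         ReturnedToOrigin = 0,
//         EnteredTargetFirstTime = 1,
//     };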
- struct switchstack_result_t - { - int status; - Greenlet* the_new_current_greenlet; - OwnedGreenlet origin_greenlet; - - switchstack_result_t() - : status(0), - the_new_current_greenlet(nullptr) - {} - - switchstack_result_t(int err) - : status(err), - the_new_current_greenlet(nullptr) - {} - - switchstack_result_t(int err, Greenlet* state, OwnedGreenlet& origin) - : status(err), - the_new_current_greenlet(state), - origin_greenlet(origin) - { - } - - switchstack_result_t(int err, Greenlet* state, const BorrowedGreenlet& origin) - : status(err), - the_new_current_greenlet(state), - origin_greenlet(origin) - { - } - - switchstack_result_t(const switchstack_result_t& other) - : status(other.status), - the_new_current_greenlet(other.the_new_current_greenlet), - origin_greenlet(other.origin_greenlet) - {} - - switchstack_result_t& operator=(const switchstack_result_t& other) - { - this->status = other.status; - this->the_new_current_greenlet = other.the_new_current_greenlet; - this->origin_greenlet = other.origin_greenlet; - return *this; - } - }; - - OwnedObject on_switchstack_or_initialstub_failure( - Greenlet* target, - const switchstack_result_t& err, - const bool target_was_me=false, - const bool was_initial_stub=false); - - // Returns the previous greenlet we just switched away from. - virtual OwnedGreenlet g_switchstack_success() noexcept; - - - // Check the preconditions for switching to this greenlet; if they - // aren't met, throws PyErrOccurred. Most callers will want to - // catch this and clear the arguments. - inline void check_switch_allowed() const; - class GreenletStartedWhileInPython : public std::runtime_error - { - public: - GreenletStartedWhileInPython() : std::runtime_error("") - {} - }; - - protected: - - - /** - Perform a stack switch into this greenlet. - - This temporarily sets the global variable - ``switching_thread_state`` to this greenlet; as soon as the - call to ``slp_switch`` completes, this is reset to NULL. - Consequently, this depends on the GIL. - - TODO: Adopt the stackman model and pass ``slp_switch`` a - callback function and context pointer; this eliminates the - need for global variables altogether. - - Because the stack switch happens in this function, this - function can't use its own stack (local) variables, set - before the switch, and then accessed after the switch. - - Further, you can't even access ``g_thread_state_global`` - before and after the switch from the global variable. - Because it is thread local some compilers cache it in a - register/on the stack, notably new versions of MSVC; this - breaks with strange crashes sometime later, because writing - to anything in ``g_thread_state_global`` after the switch - is actually writing to random memory. For this reason, we - call a non-inlined function to finish the operation. (XXX: - The ``/GT`` MSVC compiler argument probably fixes that.) - - It is very important that stack switch is 'atomic', i.e. no - calls into other Python code allowed (except very few that - are safe), because global variables are very fragile. (This - should no longer be the case with thread-local variables.) - - */ - // Made virtual to facilitate subclassing UserGreenlet for testing.
- virtual switchstack_result_t g_switchstack(void); - -class TracingGuard -{ -private: - PyThreadState* tstate; -public: - TracingGuard() - : tstate(PyThreadState_GET()) - { - PyThreadState_EnterTracing(this->tstate); - } - - ~TracingGuard() - { - PyThreadState_LeaveTracing(this->tstate); - this->tstate = nullptr; - } - - inline void CallTraceFunction(const OwnedObject& tracefunc, - const greenlet::refs::ImmortalEventName& event, - const BorrowedGreenlet& origin, - const BorrowedGreenlet& target) - { - // TODO: This calls tracefunc(event, (origin, target)). Add a shortcut - // function for that that's specialized to avoid the Py_BuildValue - // string parsing, or start with just using "ON" format with PyTuple_Pack(2, - // origin, target). That seems like what the N format is meant - // for. - // XXX: Why does event not automatically cast back to a PyObject? - // It tries to call the "deleted constructor ImmortalEventName - // const" instead. - assert(tracefunc); - assert(event); - assert(origin); - assert(target); - greenlet::refs::NewReference retval( - PyObject_CallFunction( - tracefunc.borrow(), - "O(OO)", - event.borrow(), - origin.borrow(), - target.borrow() - )); - if (!retval) { - throw PyErrOccurred::from_current(); - } - } -}; - - static void - g_calltrace(const OwnedObject& tracefunc, - const greenlet::refs::ImmortalEventName& event, - const greenlet::refs::BorrowedGreenlet& origin, - const BorrowedGreenlet& target); - private: - OwnedObject g_switch_finish(const switchstack_result_t& err); - - }; - - class UserGreenlet : public Greenlet - { - private: - static greenlet::PythonAllocator allocator; - OwnedMainGreenlet _main_greenlet; - OwnedObject _run_callable; - OwnedGreenlet _parent; - public: - static void* operator new(size_t UNUSED(count)); - static void operator delete(void* ptr); - - UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent); - virtual ~UserGreenlet(); - - virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; - virtual bool was_running_in_dead_thread() const noexcept; - virtual ThreadState* thread_state() const noexcept; - virtual OwnedObject g_switch(); - virtual const OwnedObject& run() const - { - if (this->started() || !this->_run_callable) { - throw AttributeError("run"); - } - return this->_run_callable; - } - virtual void run(const refs::BorrowedObject nrun); - - virtual const OwnedGreenlet parent() const; - virtual void parent(const refs::BorrowedObject new_parent); - - virtual const refs::BorrowedMainGreenlet main_greenlet() const; - - virtual void murder_in_place(); - virtual bool belongs_to_thread(const ThreadState* state) const; - virtual int tp_traverse(visitproc visit, void* arg); - virtual int tp_clear(); - class ParentIsCurrentGuard - { - private: - OwnedGreenlet oldparent; - UserGreenlet* greenlet; - G_NO_COPIES_OF_CLS(ParentIsCurrentGuard); - public: - ParentIsCurrentGuard(UserGreenlet* p, const ThreadState& thread_state); - ~ParentIsCurrentGuard(); - }; - virtual OwnedObject throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state); - protected: - virtual switchstack_result_t g_initialstub(void* mark); - private: - // This function isn't meant to return. - // This accepts raw pointers and the ownership of them at the - // same time. The caller should use ``inner_bootstrap(origin.relinquish_ownership())``. 
- void inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run); - }; - - class BrokenGreenlet : public UserGreenlet - { - private: - static greenlet::PythonAllocator allocator; - public: - bool _force_switch_error = false; - bool _force_slp_switch_error = false; - - static void* operator new(size_t UNUSED(count)); - static void operator delete(void* ptr); - BrokenGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) - : UserGreenlet(p, the_parent) - {} - virtual ~BrokenGreenlet() - {} - - virtual switchstack_result_t g_switchstack(void); - virtual bool force_slp_switch_error() const noexcept; - - }; - - class MainGreenlet : public Greenlet - { - private: - static greenlet::PythonAllocator allocator; - refs::BorrowedMainGreenlet _self; - ThreadState* _thread_state; - G_NO_COPIES_OF_CLS(MainGreenlet); - public: - static void* operator new(size_t UNUSED(count)); - static void operator delete(void* ptr); - - MainGreenlet(refs::BorrowedMainGreenlet::PyType*, ThreadState*); - virtual ~MainGreenlet(); - - - virtual const OwnedObject& run() const; - virtual void run(const refs::BorrowedObject nrun); - - virtual const OwnedGreenlet parent() const; - virtual void parent(const refs::BorrowedObject new_parent); - - virtual const refs::BorrowedMainGreenlet main_greenlet() const; - - virtual refs::BorrowedMainGreenlet find_main_greenlet_in_lineage() const; - virtual bool was_running_in_dead_thread() const noexcept; - virtual ThreadState* thread_state() const noexcept; - void thread_state(ThreadState*) noexcept; - virtual OwnedObject g_switch(); - virtual int tp_traverse(visitproc visit, void* arg); - }; - - // Instantiate one on the stack to save the GC state, - // and then disable GC. When it goes out of scope, GC will be - // restored to its original state. Sadly, these APIs are only - // available on 3.10+; luckily, we only need them on 3.11+. -#if GREENLET_PY310 - class GCDisabledGuard - { - private: - int was_enabled = 0; - public: - GCDisabledGuard() - : was_enabled(PyGC_IsEnabled()) - { - PyGC_Disable(); - } - - ~GCDisabledGuard() - { - if (this->was_enabled) { - PyGC_Enable(); - } - } - }; -#endif - - OwnedObject& operator<<=(OwnedObject& lhs, greenlet::SwitchingArgs& rhs) noexcept; - - //TODO: Greenlet::g_switch() should call this automatically on its - //return value. As it is, the module code is calling it. - static inline OwnedObject - single_result(const OwnedObject& results) - { - if (results - && PyTuple_Check(results.borrow()) - && PyTuple_GET_SIZE(results.borrow()) == 1) { - PyObject* result = PyTuple_GET_ITEM(results.borrow(), 0); - assert(result); - return OwnedObject::owning(result); - } - return results; - } - - - static OwnedObject - g_handle_exit(const OwnedObject& greenlet_result); - - - template - void operator<<(const PyThreadState *const lhs, T& rhs) - { - rhs.operator<<(lhs); - } - -} // namespace greenlet ; - -#endif diff --git a/myenv/Lib/site-packages/greenlet/TGreenletGlobals.cpp b/myenv/Lib/site-packages/greenlet/TGreenletGlobals.cpp deleted file mode 100644 index 0087d2f..0000000 --- a/myenv/Lib/site-packages/greenlet/TGreenletGlobals.cpp +++ /dev/null @@ -1,94 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of GreenletGlobals. 
- * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_GREENLET_GLOBALS -#define T_GREENLET_GLOBALS - -#include "greenlet_refs.hpp" -#include "greenlet_exceptions.hpp" -#include "greenlet_thread_support.hpp" -#include "greenlet_internal.hpp" - -namespace greenlet { - -// This encapsulates what were previously module global "constants" -// established at init time. -// This is a step towards Python3 style module state that allows -// reloading. -// -// In an earlier iteration of this code, we used placement new to be -// able to allocate this object statically still, so that references -// to its members don't incur an extra pointer indirection. -// But under some scenarios, that could result in crashes at -// shutdown because apparently the destructor was getting run twice? -class GreenletGlobals -{ - -public: - const greenlet::refs::ImmortalEventName event_switch; - const greenlet::refs::ImmortalEventName event_throw; - const greenlet::refs::ImmortalException PyExc_GreenletError; - const greenlet::refs::ImmortalException PyExc_GreenletExit; - const greenlet::refs::ImmortalObject empty_tuple; - const greenlet::refs::ImmortalObject empty_dict; - const greenlet::refs::ImmortalString str_run; - Mutex* const thread_states_to_destroy_lock; - greenlet::cleanup_queue_t thread_states_to_destroy; - - GreenletGlobals() : - event_switch("switch"), - event_throw("throw"), - PyExc_GreenletError("greenlet.error"), - PyExc_GreenletExit("greenlet.GreenletExit", PyExc_BaseException), - empty_tuple(Require(PyTuple_New(0))), - empty_dict(Require(PyDict_New())), - str_run("run"), - thread_states_to_destroy_lock(new Mutex()) - {} - - ~GreenletGlobals() - { - // This object is (currently) effectively immortal, and not - // just because of those placement new tricks; if we try to - // deallocate the static object we allocated, and overwrote, - // we would be doing so at C++ teardown time, which is after - // the final Python GIL is released, and we can't use the API - // then. - // (The members will still be destructed, but they also don't - // do any deallocation.) - } - - void queue_to_destroy(ThreadState* ts) const - { - // we're currently accessed through a static const object, - // implicitly marking our members as const, so code can't just - // call push_back (or pop_back) without casting away the - // const. - // - // Do that for callers. - greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); - q.push_back(ts); - } - - ThreadState* take_next_to_destroy() const - { - greenlet::cleanup_queue_t& q = const_cast(this->thread_states_to_destroy); - ThreadState* result = q.back(); - q.pop_back(); - return result; - } -}; - -}; // namespace greenlet - -static const greenlet::GreenletGlobals* mod_globs; - -#endif // T_GREENLET_GLOBALS diff --git a/myenv/Lib/site-packages/greenlet/TMainGreenlet.cpp b/myenv/Lib/site-packages/greenlet/TMainGreenlet.cpp deleted file mode 100644 index a2a9cfe..0000000 --- a/myenv/Lib/site-packages/greenlet/TMainGreenlet.cpp +++ /dev/null @@ -1,153 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::MainGreenlet. 
- * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_MAIN_GREENLET_CPP -#define T_MAIN_GREENLET_CPP - -#include "TGreenlet.hpp" - - - -// Protected by the GIL. Incremented when we create a main greenlet, -// in a new thread, decremented when it is destroyed. -static Py_ssize_t G_TOTAL_MAIN_GREENLETS; - -namespace greenlet { -greenlet::PythonAllocator MainGreenlet::allocator; - -void* MainGreenlet::operator new(size_t UNUSED(count)) -{ - return allocator.allocate(1); -} - - -void MainGreenlet::operator delete(void* ptr) -{ - return allocator.deallocate(static_cast(ptr), - 1); -} - - -MainGreenlet::MainGreenlet(PyGreenlet* p, ThreadState* state) - : Greenlet(p, StackState::make_main()), - _self(p), - _thread_state(state) -{ - G_TOTAL_MAIN_GREENLETS++; -} - -MainGreenlet::~MainGreenlet() -{ - G_TOTAL_MAIN_GREENLETS--; - this->tp_clear(); -} - -ThreadState* -MainGreenlet::thread_state() const noexcept -{ - return this->_thread_state; -} - -void -MainGreenlet::thread_state(ThreadState* t) noexcept -{ - assert(!t); - this->_thread_state = t; -} - - -const BorrowedMainGreenlet -MainGreenlet::main_greenlet() const -{ - return this->_self; -} - -BorrowedMainGreenlet -MainGreenlet::find_main_greenlet_in_lineage() const -{ - return BorrowedMainGreenlet(this->_self); -} - -bool -MainGreenlet::was_running_in_dead_thread() const noexcept -{ - return !this->_thread_state; -} - -OwnedObject -MainGreenlet::g_switch() -{ - try { - this->check_switch_allowed(); - } - catch (const PyErrOccurred&) { - this->release_args(); - throw; - } - - switchstack_result_t err = this->g_switchstack(); - if (err.status < 0) { - // XXX: This code path is untested, but it is shared - // with the UserGreenlet path that is tested. - return this->on_switchstack_or_initialstub_failure( - this, - err, - true, // target was me - false // was initial stub - ); - } - - return err.the_new_current_greenlet->g_switch_finish(err); -} - -int -MainGreenlet::tp_traverse(visitproc visit, void* arg) -{ - if (this->_thread_state) { - // we've already traversed main, (self), don't do it again. 
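// ---- [editor's sketch, not part of the original diff] ----------------------
// The class-level operator new/delete pairs in these files route single-object
// allocation through a class-wide allocator. The same shape reduced to a
// self-contained example (`PoolAllocator` and `Managed` are hypothetical
// stand-ins for greenlet's PythonAllocator and its greenlet classes):
#include <cstddef>
#include <memory>

template <typename T>
struct PoolAllocator {
    T* allocate(std::size_t n) { return std::allocator<T>().allocate(n); }
    void deallocate(T* p, std::size_t n) { std::allocator<T>().deallocate(p, n); }
};

class Managed {
    static PoolAllocator<Managed> allocator;
public:
    static void* operator new(std::size_t) { return allocator.allocate(1); }
    static void operator delete(void* ptr)
    {
        allocator.deallocate(static_cast<Managed*>(ptr), 1);
    }
};
PoolAllocator<Managed> Managed::allocator;
// ---- [end editor's sketch] -------------------------------------------------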
- int result = this->_thread_state->tp_traverse(visit, arg, false); - if (result) { - return result; - } - } - return Greenlet::tp_traverse(visit, arg); -} - -const OwnedObject& -MainGreenlet::run() const -{ - throw AttributeError("Main greenlets do not have a run attribute."); -} - -void -MainGreenlet::run(const BorrowedObject UNUSED(nrun)) -{ - throw AttributeError("Main greenlets do not have a run attribute."); -} - -void -MainGreenlet::parent(const BorrowedObject raw_new_parent) -{ - if (!raw_new_parent) { - throw AttributeError("can't delete attribute"); - } - throw AttributeError("cannot set the parent of a main greenlet"); -} - -const OwnedGreenlet -MainGreenlet::parent() const -{ - return OwnedGreenlet(); // null becomes None -} - -}; // namespace greenlet - -#endif diff --git a/myenv/Lib/site-packages/greenlet/TPythonState.cpp b/myenv/Lib/site-packages/greenlet/TPythonState.cpp deleted file mode 100644 index cc5dff5..0000000 --- a/myenv/Lib/site-packages/greenlet/TPythonState.cpp +++ /dev/null @@ -1,393 +0,0 @@ -#ifndef GREENLET_PYTHON_STATE_CPP -#define GREENLET_PYTHON_STATE_CPP - -#include -#include "TGreenlet.hpp" - -namespace greenlet { - -PythonState::PythonState() - : _top_frame() -#if GREENLET_USE_CFRAME - ,cframe(nullptr) - ,use_tracing(0) -#endif -#if GREENLET_PY312 - ,py_recursion_depth(0) - ,c_recursion_depth(0) -#else - ,recursion_depth(0) -#endif -#if GREENLET_PY313 - ,delete_later(nullptr) -#else - ,trash_delete_nesting(0) -#endif -#if GREENLET_PY311 - ,current_frame(nullptr) - ,datastack_chunk(nullptr) - ,datastack_top(nullptr) - ,datastack_limit(nullptr) -#endif -{ -#if GREENLET_USE_CFRAME - /* - The PyThreadState->cframe pointer usually points to memory on - the stack, alloceted in a call into PyEval_EvalFrameDefault. - - Initially, before any evaluation begins, it points to the - initial PyThreadState object's ``root_cframe`` object, which is - statically allocated for the lifetime of the thread. - - A greenlet can last for longer than a call to - PyEval_EvalFrameDefault, so we can't set its ``cframe`` pointer - to be the current ``PyThreadState->cframe``; nor could we use - one from the greenlet parent for the same reason. Yet a further - no: we can't allocate one scoped to the greenlet and then - destroy it when the greenlet is deallocated, because inside the - interpreter the _PyCFrame objects form a linked list, and that too - can result in accessing memory beyond its dynamic lifetime (if - the greenlet doesn't actually finish before it dies, its entry - could still be in the list). - - Using the ``root_cframe`` is problematic, though, because its - members are never modified by the interpreter and are set to 0, - meaning that its ``use_tracing`` flag is never updated. We don't - want to modify that value in the ``root_cframe`` ourself: it - *shouldn't* matter much because we should probably never get - back to the point where that's the only cframe on the stack; - even if it did matter, the major consequence of an incorrect - value for ``use_tracing`` is that if its true the interpreter - does some extra work --- however, it's just good code hygiene. - - Our solution: before a greenlet runs, after its initial - creation, it uses the ``root_cframe`` just to have something to - put there. 
However, once the greenlet is actually switched to - for the first time, ``g_initialstub`` (which doesn't actually - "return" while the greenlet is running) stores a new _PyCFrame on - its local stack, and copies the appropriate values from the - currently running _PyCFrame; this is then made the _PyCFrame for the - newly-minted greenlet. ``g_initialstub`` then proceeds to call - ``glet.run()``, which results in ``PyEval_...`` adding the - _PyCFrame to the list. Switches continue as normal. Finally, when - the greenlet finishes, the call to ``glet.run()`` returns and - the _PyCFrame is taken out of the linked list and the stack value - is now unused and free to expire. - - XXX: I think we can do better. If we're deallocing in the same - thread, can't we traverse the list and unlink our frame? - Can we just keep a reference to the thread state in case we - dealloc in another thread? (Is that even possible if we're still - running and haven't returned from g_initialstub?) - */ - this->cframe = &PyThreadState_GET()->root_cframe; -#endif -} - - -inline void PythonState::may_switch_away() noexcept -{ -#if GREENLET_PY311 - // PyThreadState_GetFrame is probably going to have to allocate a - // new frame object. That may trigger garbage collection. Because - // we call this during the early phases of a switch (it doesn't - // matter to which greenlet, as this has a global effect), if a GC - // triggers a switch away, two things can happen, both bad: - // - We might not get switched back to, halting forward progress. - // this is pathological, but possible. - // - We might get switched back to with a different set of - // arguments or a throw instead of a switch. That would corrupt - // our state (specifically, PyErr_Occurred() and this->args() - // would no longer agree). - // - // Thus, when we call this API, we need to have GC disabled. - // This method serves as a bottleneck we call when maybe beginning - // a switch. In this way, it is always safe -- no risk of GC -- to - // use ``_GetFrame()`` whenever we need to, just as it was in - // <=3.10 (because subsequent calls will be cached and not - // allocate memory). - - GCDisabledGuard no_gc; - Py_XDECREF(PyThreadState_GetFrame(PyThreadState_GET())); -#endif -} - -void PythonState::operator<<(const PyThreadState *const tstate) noexcept -{ - this->_context.steal(tstate->context); -#if GREENLET_USE_CFRAME - /* - IMPORTANT: ``cframe`` is a pointer into the STACK. Thus, because - the call to ``slp_switch()`` changes the contents of the stack, - you cannot read from ``ts_current->cframe`` after that call and - necessarily get the same values you get from reading it here. - Anything you need to restore from now to then must be saved in a - global/threadlocal variable (because we can't use stack - variables here either). For things that need to persist across - the switch, use `will_switch_from`. 
- */ - this->cframe = tstate->cframe; - #if !GREENLET_PY312 - this->use_tracing = tstate->cframe->use_tracing; - #endif -#endif // GREENLET_USE_CFRAME -#if GREENLET_PY311 - #if GREENLET_PY312 - this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; - this->c_recursion_depth = Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining; - #else // not 312 - this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; - #endif // GREENLET_PY312 - #if GREENLET_PY313 - this->current_frame = tstate->current_frame; - #elif GREENLET_USE_CFRAME - this->current_frame = tstate->cframe->current_frame; - #endif - this->datastack_chunk = tstate->datastack_chunk; - this->datastack_top = tstate->datastack_top; - this->datastack_limit = tstate->datastack_limit; - - PyFrameObject *frame = PyThreadState_GetFrame((PyThreadState *)tstate); - Py_XDECREF(frame); // PyThreadState_GetFrame gives us a new - // reference. - this->_top_frame.steal(frame); - #if GREENLET_PY313 - this->delete_later = Py_XNewRef(tstate->delete_later); - #elif GREENLET_PY312 - this->trash_delete_nesting = tstate->trash.delete_nesting; - #else // not 312 - this->trash_delete_nesting = tstate->trash_delete_nesting; - #endif // GREENLET_PY312 -#else // Not 311 - this->recursion_depth = tstate->recursion_depth; - this->_top_frame.steal(tstate->frame); - this->trash_delete_nesting = tstate->trash_delete_nesting; -#endif // GREENLET_PY311 -} - -#if GREENLET_PY312 -void GREENLET_NOINLINE(PythonState::unexpose_frames)() -{ - if (!this->top_frame()) { - return; - } - - // See GreenletState::expose_frames() and the comment on frames_were_exposed - // for more information about this logic. - _PyInterpreterFrame *iframe = this->_top_frame->f_frame; - while (iframe != nullptr) { - _PyInterpreterFrame *prev_exposed = iframe->previous; - assert(iframe->frame_obj); - memcpy(&iframe->previous, &iframe->frame_obj->_f_frame_data[0], - sizeof(void *)); - iframe = prev_exposed; - } -} -#else -void PythonState::unexpose_frames() -{} -#endif - -void PythonState::operator>>(PyThreadState *const tstate) noexcept -{ - tstate->context = this->_context.relinquish_ownership(); - /* Incrementing this value invalidates the contextvars cache, - which would otherwise remain valid across switches */ - tstate->context_ver++; -#if GREENLET_USE_CFRAME - tstate->cframe = this->cframe; - /* - If we were tracing, we need to keep tracing. - There should never be the possibility of hitting the - root_cframe here. See note above about why we can't - just copy this from ``origin->cframe->use_tracing``. 
- */ - #if !GREENLET_PY312 - tstate->cframe->use_tracing = this->use_tracing; - #endif -#endif // GREENLET_USE_CFRAME -#if GREENLET_PY311 - #if GREENLET_PY312 - tstate->py_recursion_remaining = tstate->py_recursion_limit - this->py_recursion_depth; - tstate->c_recursion_remaining = Py_C_RECURSION_LIMIT - this->c_recursion_depth; - this->unexpose_frames(); - #else // \/ 3.11 - tstate->recursion_remaining = tstate->recursion_limit - this->recursion_depth; - #endif // GREENLET_PY312 - #if GREENLET_PY313 - tstate->current_frame = this->current_frame; - #elif GREENLET_USE_CFRAME - tstate->cframe->current_frame = this->current_frame; - #endif - tstate->datastack_chunk = this->datastack_chunk; - tstate->datastack_top = this->datastack_top; - tstate->datastack_limit = this->datastack_limit; - this->_top_frame.relinquish_ownership(); - #if GREENLET_PY313 - Py_XDECREF(tstate->delete_later); - tstate->delete_later = this->delete_later; - Py_CLEAR(this->delete_later); - #elif GREENLET_PY312 - tstate->trash.delete_nesting = this->trash_delete_nesting; - #else // not 3.12 - tstate->trash_delete_nesting = this->trash_delete_nesting; - #endif // GREENLET_PY312 -#else // not 3.11 - tstate->frame = this->_top_frame.relinquish_ownership(); - tstate->recursion_depth = this->recursion_depth; - tstate->trash_delete_nesting = this->trash_delete_nesting; -#endif // GREENLET_PY311 -} - -inline void PythonState::will_switch_from(PyThreadState *const origin_tstate) noexcept -{ -#if GREENLET_USE_CFRAME && !GREENLET_PY312 - // The weird thing is, we don't actually save this for an - // effect on the current greenlet, it's saved for an - // effect on the target greenlet. That is, we want - // continuity of this setting across the greenlet switch. - this->use_tracing = origin_tstate->cframe->use_tracing; -#endif -} - -void PythonState::set_initial_state(const PyThreadState* const tstate) noexcept -{ - this->_top_frame = nullptr; -#if GREENLET_PY312 - this->py_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; - // XXX: TODO: Comment from a reviewer: - // Should this be ``Py_C_RECURSION_LIMIT - tstate->c_recursion_remaining``? - // But to me it looks more like that might not be the right - // initialization either? - this->c_recursion_depth = tstate->py_recursion_limit - tstate->py_recursion_remaining; -#elif GREENLET_PY311 - this->recursion_depth = tstate->recursion_limit - tstate->recursion_remaining; -#else - this->recursion_depth = tstate->recursion_depth; -#endif -} -// TODO: Better state management about when we own the top frame. -int PythonState::tp_traverse(visitproc visit, void* arg, bool own_top_frame) noexcept -{ - Py_VISIT(this->_context.borrow()); - if (own_top_frame) { - Py_VISIT(this->_top_frame.borrow()); - } - return 0; -} - -void PythonState::tp_clear(bool own_top_frame) noexcept -{ - PythonStateContext::tp_clear(); - // If we get here owning a frame, - // we got dealloc'd without being finished. We may or may not be - // in the same thread. - if (own_top_frame) { - this->_top_frame.CLEAR(); - } -} - -#if GREENLET_USE_CFRAME -void PythonState::set_new_cframe(_PyCFrame& frame) noexcept -{ - frame = *PyThreadState_GET()->cframe; - /* Make the target greenlet refer to the stack value. */ - this->cframe = &frame; - /* - And restore the link to the previous frame so this one gets - unliked appropriately. 
- */ - this->cframe->previous = &PyThreadState_GET()->root_cframe; -} -#endif - -const PythonState::OwnedFrame& PythonState::top_frame() const noexcept -{ - return this->_top_frame; -} - -void PythonState::did_finish(PyThreadState* tstate) noexcept -{ -#if GREENLET_PY311 - // See https://github.com/gevent/gevent/issues/1924 and - // https://github.com/python-greenlet/greenlet/issues/328. In - // short, Python 3.11 allocates memory for frames as a sort of - // linked list that's kept as part of PyThreadState in the - // ``datastack_chunk`` member and friends. These are saved and - // restored as part of switching greenlets. - // - // When we initially switch to a greenlet, we set those to NULL. - // That causes the frame management code to treat this like a - // brand new thread and start a fresh list of chunks, beginning - // with a new "root" chunk. As we make calls in this greenlet, - // those chunks get added, and as calls return, they get popped. - // But the frame code (pystate.c) is careful to make sure that the - // root chunk never gets popped. - // - // Thus, when a greenlet exits for the last time, there will be at - // least a single root chunk that we must be responsible for - // deallocating. - // - // The complex part is that these chunks are allocated and freed - // using ``_PyObject_VirtualAlloc``/``Free``. Those aren't public - // functions, and they aren't exported for linking. It so happens - // that we know they are just thin wrappers around the Arena - // allocator, so we can use that directly to deallocate in a - // compatible way. - // - // CAUTION: Check this implementation detail on every major version. - // - // It might be nice to be able to do this in our destructor, but - // can we be sure that no one else is using that memory? Plus, as - // described below, our pointers may not even be valid anymore. As - // a special case, there is one time that we know we can do this, - // and that's from the destructor of the associated UserGreenlet - // (NOT main greenlet) - PyObjectArenaAllocator alloc; - _PyStackChunk* chunk = nullptr; - if (tstate) { - // We really did finish, we can never be switched to again. - chunk = tstate->datastack_chunk; - // Unfortunately, we can't do much sanity checking. Our - // this->datastack_chunk pointer is out of date (evaluation may - // have popped down through it already) so we can't verify that - // we deallocate it. I don't think we can even check datastack_top - // for the same reason. - - PyObject_GetArenaAllocator(&alloc); - tstate->datastack_chunk = nullptr; - tstate->datastack_limit = nullptr; - tstate->datastack_top = nullptr; - - } - else if (this->datastack_chunk) { - // The UserGreenlet (NOT the main greenlet!) is being deallocated. If we're - // still holding a stack chunk, it's garbage because we know - // we can never switch back to let cPython clean it up. - // Because the last time we got switched away from, and we - // haven't run since then, we know our chain is valid and can - // be dealloced. - chunk = this->datastack_chunk; - PyObject_GetArenaAllocator(&alloc); - } - - if (alloc.free && chunk) { - // In case the arena mechanism has been torn down already. 
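// ---- [editor's sketch, not part of the original diff] ----------------------
// The loop below frees each saved stack chunk through the allocator returned
// by the documented PyObject_GetArenaAllocator() API, guarding against the
// arena being torn down at interpreter shutdown. That fetch-and-check step in
// isolation (`arena_free_available` is a hypothetical name):
static int arena_free_available(void)
{
    PyObjectArenaAllocator alloc;
    PyObject_GetArenaAllocator(&alloc);  // fills in ctx/alloc/free
    return alloc.free != NULL;           // free may be NULL during finalization
}
// ---- [end editor's sketch] -------------------------------------------------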
-        while (chunk) {
-            _PyStackChunk *prev = chunk->previous;
-            chunk->previous = nullptr;
-            alloc.free(alloc.ctx, chunk, chunk->size);
-            chunk = prev;
-        }
-    }
-
-    this->datastack_chunk = nullptr;
-    this->datastack_limit = nullptr;
-    this->datastack_top = nullptr;
-#endif
-}
-
-
-}; // namespace greenlet
-
-#endif // GREENLET_PYTHON_STATE_CPP
diff --git a/myenv/Lib/site-packages/greenlet/TStackState.cpp b/myenv/Lib/site-packages/greenlet/TStackState.cpp
deleted file mode 100644
index 9743ab5..0000000
--- a/myenv/Lib/site-packages/greenlet/TStackState.cpp
+++ /dev/null
@@ -1,265 +0,0 @@
-#ifndef GREENLET_STACK_STATE_CPP
-#define GREENLET_STACK_STATE_CPP
-
-#include "TGreenlet.hpp"
-
-namespace greenlet {
-
-#ifdef GREENLET_USE_STDIO
-#include <iostream>
-using std::cerr;
-using std::endl;
-
-std::ostream& operator<<(std::ostream& os, const StackState& s)
-{
-    os << "StackState(stack_start=" << (void*)s._stack_start
-       << ", stack_stop=" << (void*)s.stack_stop
-       << ", stack_copy=" << (void*)s.stack_copy
-       << ", stack_saved=" << s._stack_saved
-       << ", stack_prev=" << s.stack_prev
-       << ", addr=" << &s
-       << ")";
-    return os;
-}
-#endif
-
-StackState::StackState(void* mark, StackState& current)
-    : _stack_start(nullptr),
-      stack_stop((char*)mark),
-      stack_copy(nullptr),
-      _stack_saved(0),
-      /* Skip a dying greenlet */
-      stack_prev(current._stack_start
-                 ? &current
-                 : current.stack_prev)
-{
-}
-
-StackState::StackState()
-    : _stack_start(nullptr),
-      stack_stop(nullptr),
-      stack_copy(nullptr),
-      _stack_saved(0),
-      stack_prev(nullptr)
-{
-}
-
-StackState::StackState(const StackState& other)
-// can't use a delegating constructor because of
-// MSVC for Python 2.7
-    : _stack_start(nullptr),
-      stack_stop(nullptr),
-      stack_copy(nullptr),
-      _stack_saved(0),
-      stack_prev(nullptr)
-{
-    this->operator=(other);
-}
-
-StackState& StackState::operator=(const StackState& other)
-{
-    if (&other == this) {
-        return *this;
-    }
-    if (other._stack_saved) {
-        throw std::runtime_error("Refusing to steal memory.");
-    }
-
-    //If we have memory allocated, dispose of it
-    this->free_stack_copy();
-
-    this->_stack_start = other._stack_start;
-    this->stack_stop = other.stack_stop;
-    this->stack_copy = other.stack_copy;
-    this->_stack_saved = other._stack_saved;
-    this->stack_prev = other.stack_prev;
-    return *this;
-}
-
-inline void StackState::free_stack_copy() noexcept
-{
-    PyMem_Free(this->stack_copy);
-    this->stack_copy = nullptr;
-    this->_stack_saved = 0;
-}
-
-inline void StackState::copy_heap_to_stack(const StackState& current) noexcept
-{
-
-    /* Restore the heap copy back into the C stack */
-    if (this->_stack_saved != 0) {
-        memcpy(this->_stack_start, this->stack_copy, this->_stack_saved);
-        this->free_stack_copy();
-    }
-    StackState* owner = const_cast<StackState*>(&current);
-    if (!owner->_stack_start) {
-        owner = owner->stack_prev; /* greenlet is dying, skip it */
-    }
-    while (owner && owner->stack_stop <= this->stack_stop) {
-        // cerr << "\tOwner: " << owner << endl;
-        owner = owner->stack_prev; /* find greenlet with more stack */
-    }
-    this->stack_prev = owner;
-    // cerr << "\tFinished with: " << *this << endl;
-}
-
-inline int StackState::copy_stack_to_heap_up_to(const char* const stop) noexcept
-{
-    /* Save more of g's stack into the heap -- at least up to 'stop'
-       g->stack_stop |________|
-                     |        |
-                     |    __  stop       . . . . .
-                     |    |          ==>  .       .
-                     |________|          _______
-                     |        |         |       |
-                     |        |         |       |
-      g->stack_start |        |         |_______| g->stack_copy
-    */
-    intptr_t sz1 = this->_stack_saved;
-    intptr_t sz2 = stop - this->_stack_start;
-    assert(this->_stack_start);
-    if (sz2 > sz1) {
-        char* c = (char*)PyMem_Realloc(this->stack_copy, sz2);
-        if (!c) {
-            PyErr_NoMemory();
-            return -1;
-        }
-        memcpy(c + sz1, this->_stack_start + sz1, sz2 - sz1);
-        this->stack_copy = c;
-        this->_stack_saved = sz2;
-    }
-    return 0;
-}
-
-inline int StackState::copy_stack_to_heap(char* const stackref,
-                                          const StackState& current) noexcept
-{
-    /* must free all the C stack up to target_stop */
-    const char* const target_stop = this->stack_stop;
-
-    StackState* owner = const_cast<StackState*>(&current);
-    assert(owner->_stack_saved == 0); // everything is present on the stack
-    if (!owner->_stack_start) {
-        owner = owner->stack_prev; /* not saved if dying */
-    }
-    else {
-        owner->_stack_start = stackref;
-    }
-
-    while (owner->stack_stop < target_stop) {
-        /* ts_current is entirely within the area to free */
-        if (owner->copy_stack_to_heap_up_to(owner->stack_stop)) {
-            return -1; /* XXX */
-        }
-        owner = owner->stack_prev;
-    }
-    if (owner != this) {
-        if (owner->copy_stack_to_heap_up_to(target_stop)) {
-            return -1; /* XXX */
-        }
-    }
-    return 0;
-}
-
-inline bool StackState::started() const noexcept
-{
-    return this->stack_stop != nullptr;
-}
-
-inline bool StackState::main() const noexcept
-{
-    return this->stack_stop == (char*)-1;
-}
-
-inline bool StackState::active() const noexcept
-{
-    return this->_stack_start != nullptr;
-}
-
-inline void StackState::set_active() noexcept
-{
-    assert(this->_stack_start == nullptr);
-    this->_stack_start = (char*)1;
-}
-
-inline void StackState::set_inactive() noexcept
-{
-    this->_stack_start = nullptr;
-    // XXX: What if we still have memory out there?
-    // That case is actually triggered by
-    // test_issue251_issue252_explicit_reference_not_collectable (greenlet.tests.test_leaks.TestLeaks)
-    // and
-    // test_issue251_issue252_need_to_collect_in_background
-    // (greenlet.tests.test_leaks.TestLeaks)
-    //
-    // Those objects never get deallocated, so the destructor never
-    // runs.
-    // It *seems* safe to clean up the memory here?
-    if (this->_stack_saved) {
-        this->free_stack_copy();
-    }
-}
-
-inline intptr_t StackState::stack_saved() const noexcept
-{
-    return this->_stack_saved;
-}
-
-inline char* StackState::stack_start() const noexcept
-{
-    return this->_stack_start;
-}
-
-
-inline StackState StackState::make_main() noexcept
-{
-    StackState s;
-    s._stack_start = (char*)1;
-    s.stack_stop = (char*)-1;
-    return s;
-}
-
-StackState::~StackState()
-{
-    if (this->_stack_saved != 0) {
-        this->free_stack_copy();
-    }
-}
-
-void StackState::copy_from_stack(void* vdest, const void* vsrc, size_t n) const
-{
-    char* dest = static_cast<char*>(vdest);
-    const char* src = static_cast<const char*>(vsrc);
-    if (src + n <= this->_stack_start
-        || src >= this->_stack_start + this->_stack_saved
-        || this->_stack_saved == 0) {
-        // Nothing we're copying was spilled from the stack
-        memcpy(dest, src, n);
-        return;
-    }
-
-    if (src < this->_stack_start) {
-        // Copy the part before the saved stack.
-        // We know src + n > _stack_start due to the test above.
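// ---- [editor's sketch, not part of the original diff] ----------------------
// copy_stack_to_heap_up_to() above grows the heap copy and appends only the
// not-yet-saved suffix of the stack. The same arithmetic as a self-contained
// model (names are hypothetical; std::realloc stands in for PyMem_Realloc).
// copy_from_stack() resumes after this sketch.
#include <cstdlib>
#include <cstring>

struct Spill {
    char* copy = nullptr;  // heap copy of the lowest `saved` stack bytes
    long saved = 0;        // bytes already saved
};

static int save_up_to(Spill& s, const char* stack_start, const char* stop)
{
    const long want = stop - stack_start;  // bytes that must be on the heap
    if (want > s.saved) {
        char* c = static_cast<char*>(std::realloc(s.copy, want));
        if (!c) {
            return -1;  // out of memory; the original raises PyErr_NoMemory
        }
        // Only the delta needs copying; the prefix is already saved.
        std::memcpy(c + s.saved, stack_start + s.saved, want - s.saved);
        s.copy = c;
        s.saved = want;
    }
    return 0;
}
// ---- [end editor's sketch] -------------------------------------------------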
- const size_t nbefore = this->_stack_start - src; - memcpy(dest, src, nbefore); - dest += nbefore; - src += nbefore; - n -= nbefore; - } - // We know src >= _stack_start after the before-copy, and - // src < _stack_start + _stack_saved due to the first if condition - size_t nspilled = std::min(n, this->_stack_start + this->_stack_saved - src); - memcpy(dest, this->stack_copy + (src - this->_stack_start), nspilled); - dest += nspilled; - src += nspilled; - n -= nspilled; - if (n > 0) { - // Copy the part after the saved stack - memcpy(dest, src, n); - } -} - -}; // namespace greenlet - -#endif // GREENLET_STACK_STATE_CPP diff --git a/myenv/Lib/site-packages/greenlet/TThreadState.hpp b/myenv/Lib/site-packages/greenlet/TThreadState.hpp deleted file mode 100644 index e4e6f6c..0000000 --- a/myenv/Lib/site-packages/greenlet/TThreadState.hpp +++ /dev/null @@ -1,497 +0,0 @@ -#ifndef GREENLET_THREAD_STATE_HPP -#define GREENLET_THREAD_STATE_HPP - -#include -#include - -#include "greenlet_internal.hpp" -#include "greenlet_refs.hpp" -#include "greenlet_thread_support.hpp" - -using greenlet::refs::BorrowedObject; -using greenlet::refs::BorrowedGreenlet; -using greenlet::refs::BorrowedMainGreenlet; -using greenlet::refs::OwnedMainGreenlet; -using greenlet::refs::OwnedObject; -using greenlet::refs::OwnedGreenlet; -using greenlet::refs::OwnedList; -using greenlet::refs::PyErrFetchParam; -using greenlet::refs::PyArgParseParam; -using greenlet::refs::ImmortalString; -using greenlet::refs::CreatedModule; -using greenlet::refs::PyErrPieces; -using greenlet::refs::NewReference; - -namespace greenlet { -/** - * Thread-local state of greenlets. - * - * Each native thread will get exactly one of these objects, - * automatically accessed through the best available thread-local - * mechanism the compiler supports (``thread_local`` for C++11 - * compilers or ``__thread``/``declspec(thread)`` for older GCC/clang - * or MSVC, respectively.) - * - * Previously, we kept thread-local state mostly in a bunch of - * ``static volatile`` variables in the main greenlet file.. This had - * the problem of requiring extra checks, loops, and great care - * accessing these variables if we potentially invoked any Python code - * that could release the GIL, because the state could change out from - * under us. Making the variables thread-local solves this problem. - * - * When we detected that a greenlet API accessing the current greenlet - * was invoked from a different thread than the greenlet belonged to, - * we stored a reference to the greenlet in the Python thread - * dictionary for the thread the greenlet belonged to. This could lead - * to memory leaks if the thread then exited (because of a reference - * cycle, as greenlets referred to the thread dictionary, and deleting - * non-current greenlets leaked their frame plus perhaps arguments on - * the C stack). If a thread exited while still having running - * greenlet objects (perhaps that had just switched back to the main - * greenlet), and did not invoke one of the greenlet APIs *in that - * thread, immediately before it exited, without some other thread - * then being invoked*, such a leak was guaranteed. - * - * This can be partly solved by using compiler thread-local variables - * instead of the Python thread dictionary, thus avoiding a cycle. - * - * To fully solve this problem, we need a reliable way to know that a - * thread is done and we should clean up the main greenlet. 
On POSIX,
- * we can use the destructor function of ``pthread_key_create``, but
- * there's nothing similar on Windows; a C++11 thread local object
- * reliably invokes its destructor when the thread it belongs to exits
- * (non-C++11 compilers offer ``__thread`` or ``declspec(thread)`` to
- * create thread-local variables, but they can't hold C++ objects that
- * invoke destructors; the C++11 version is the most portable solution
- * I found). When the thread exits, we can drop references and
- * otherwise manipulate greenlets and frames that we know can no
- * longer be switched to. For compilers that don't support C++11
- * thread locals, we have a solution that uses the Python thread
- * dictionary, though it may not collect everything as promptly as
- * other compilers do, if some other library is using the thread
- * dictionary and has a cycle or extra reference.
- *
- * There are two small wrinkles. The first is that when the thread
- * exits, it is too late to actually invoke Python APIs: the Python
- * thread state is gone, and the GIL is released. To solve *this*
- * problem, our destructor uses ``Py_AddPendingCall`` to transfer the
- * destruction work to the main thread. (This is not an issue for the
- * dictionary solution.)
- *
- * The second is that once the thread exits, the thread local object
- * is invalid and we can't even access a pointer to it, so we can't
- * pass it to ``Py_AddPendingCall``. This is handled by actually using
- * a second object that's thread local (ThreadStateCreator) and having
- * it dynamically allocate this object so it can live until the
- * pending call runs.
- */
-
-
-
-class ThreadState {
-private:
-    // As of commit 08ad1dd7012b101db953f492e0021fb08634afad
-    // this class needed 56 bytes in a Py_DEBUG build
-    // on 64-bit macOS 11.
-    // Adding the vector takes us up to 80 bytes.
-
-    /* Strong reference to the main greenlet */
-    OwnedMainGreenlet main_greenlet;
-
-    /* Strong reference to the current greenlet. */
-    OwnedGreenlet current_greenlet;
-
-    /* Strong reference to the trace function, if any. */
-    OwnedObject tracefunc;
-
-    typedef std::vector<PyGreenlet*, PythonAllocator<PyGreenlet*> > deleteme_t;
-    /* A vector of raw PyGreenlet pointers representing things that need
-       deleted when this thread is running. The vector owns the
-       references, but you need to manually INCREF/DECREF as you use
-       them. We don't use a vector of owned references because we
-       make a copy of this vector, and that would become O(n) as all the
-       refcounts are incremented in the copy.
-    */
-    deleteme_t deleteme;
-
-#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
-    void* exception_state;
-#endif
-
-    static std::clock_t _clocks_used_doing_gc;
-    static ImmortalString get_referrers_name;
-    static PythonAllocator<ThreadState> allocator;
-
-    G_NO_COPIES_OF_CLS(ThreadState);
-
-
-    // Allocates a main greenlet for the thread state. If this fails,
-    // exits the process. Called only during constructing a ThreadState.
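// ---- [editor's sketch, not part of the original diff] ----------------------
// The comment above leans on one C++11 guarantee: a thread_local object's
// destructor runs when its owning thread exits, giving a portable
// thread-death hook. Demonstrated in isolation (`ThreadExitHook` is a
// hypothetical name); alloc_main() continues after this sketch.
#include <cstdio>
#include <thread>

struct ThreadExitHook {
    ~ThreadExitHook() { std::puts("thread exiting"); }
};

static thread_local ThreadExitHook exit_hook;

int main()
{
    std::thread t([] { (void)&exit_hook; });  // first use constructs it
    t.join();  // "thread exiting" was printed as the thread shut down
    return 0;
}
// ---- [end editor's sketch] -------------------------------------------------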
-    MainGreenlet* alloc_main()
-    {
-        PyGreenlet* gmain;
-
-        /* create the main greenlet for this thread */
-        gmain = reinterpret_cast<PyGreenlet*>(PyType_GenericAlloc(&PyGreenlet_Type, 0));
-        if (gmain == NULL) {
-            throw PyFatalError("alloc_main failed to alloc"); //exits the process
-        }
-
-        MainGreenlet* const main = new MainGreenlet(gmain, this);
-
-        assert(Py_REFCNT(gmain) == 1);
-        assert(gmain->pimpl == main);
-        return main;
-    }
-
-
-public:
-    static void* operator new(size_t UNUSED(count))
-    {
-        return ThreadState::allocator.allocate(1);
-    }
-
-    static void operator delete(void* ptr)
-    {
-        return ThreadState::allocator.deallocate(static_cast<ThreadState*>(ptr),
-                                                 1);
-    }
-
-    static void init()
-    {
-        ThreadState::get_referrers_name = "get_referrers";
-        ThreadState::_clocks_used_doing_gc = 0;
-    }
-
-    ThreadState()
-    {
-
-#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
-        this->exception_state = slp_get_exception_state();
-#endif
-
-        // XXX: Potentially dangerous, exposing a not fully
-        // constructed object.
-        MainGreenlet* const main = this->alloc_main();
-        this->main_greenlet = OwnedMainGreenlet::consuming(
-            main->self()
-        );
-        assert(this->main_greenlet);
-        this->current_greenlet = main->self();
-        // The main greenlet starts with 1 ref: the returned one. We
-        // then copied it to the current greenlet.
-        assert(this->main_greenlet.REFCNT() == 2);
-    }
-
-    inline void restore_exception_state()
-    {
-#ifdef GREENLET_NEEDS_EXCEPTION_STATE_SAVED
-        // It's probably important this be inlined and only call C
-        // functions to avoid adding an SEH frame.
-        slp_set_exception_state(this->exception_state);
-#endif
-    }
-
-    inline bool has_main_greenlet() const noexcept
-    {
-        return bool(this->main_greenlet);
-    }
-
-    // Called from the ThreadStateCreator when we're in non-standard
-    // threading mode. In that case, there is an object in the Python
-    // thread state dictionary that points to us. The main greenlet
-    // also traverses into us, in which case it's crucial not to
-    // traverse back into the main greenlet.
-    int tp_traverse(visitproc visit, void* arg, bool traverse_main=true)
-    {
-        if (traverse_main) {
-            Py_VISIT(main_greenlet.borrow_o());
-        }
-        if (traverse_main || current_greenlet != main_greenlet) {
-            Py_VISIT(current_greenlet.borrow_o());
-        }
-        Py_VISIT(tracefunc.borrow());
-        return 0;
-    }
-
-    inline BorrowedMainGreenlet borrow_main_greenlet() const noexcept
-    {
-        assert(this->main_greenlet);
-        assert(this->main_greenlet.REFCNT() >= 2);
-        return this->main_greenlet;
-    };
-
-    inline OwnedMainGreenlet get_main_greenlet() const noexcept
-    {
-        return this->main_greenlet;
-    }
-
-    /**
-     * In addition to returning a new reference to the current
-     * greenlet, this performs any maintenance needed.
-     */
-    inline OwnedGreenlet get_current()
-    {
-        /* green_dealloc() cannot delete greenlets from other threads, so
-           it stores them in the thread dict; delete them now. */
-        this->clear_deleteme_list();
-        //assert(this->current_greenlet->main_greenlet == this->main_greenlet);
-        //assert(this->main_greenlet->main_greenlet == this->main_greenlet);
-        return this->current_greenlet;
-    }
-
-    /**
-     * As for non-const get_current().
-     */
-    inline BorrowedGreenlet borrow_current()
-    {
-        this->clear_deleteme_list();
-        return this->current_greenlet;
-    }
-
-    /**
-     * Does no maintenance.
- */ - inline OwnedGreenlet get_current() const - { - return this->current_greenlet; - } - - template - inline bool is_current(const refs::PyObjectPointer& obj) const - { - return this->current_greenlet.borrow_o() == obj.borrow_o(); - } - - inline void set_current(const OwnedGreenlet& target) - { - this->current_greenlet = target; - } - -private: - /** - * Deref and remove the greenlets from the deleteme list. Must be - * holding the GIL. - * - * If *murder* is true, then we must be called from a different - * thread than the one that these greenlets were running in. - * In that case, if the greenlet was actually running, we destroy - * the frame reference and otherwise make it appear dead before - * proceeding; otherwise, we would try (and fail) to raise an - * exception in it and wind up right back in this list. - */ - inline void clear_deleteme_list(const bool murder=false) - { - if (!this->deleteme.empty()) { - // It's possible we could add items to this list while - // running Python code if there's a thread switch, so we - // need to defensively copy it before that can happen. - deleteme_t copy = this->deleteme; - this->deleteme.clear(); // in case things come back on the list - for(deleteme_t::iterator it = copy.begin(), end = copy.end(); - it != end; - ++it ) { - PyGreenlet* to_del = *it; - if (murder) { - // Force each greenlet to appear dead; we can't raise an - // exception into it anymore anyway. - to_del->pimpl->murder_in_place(); - } - - // The only reference to these greenlets should be in - // this list, decreffing them should let them be - // deleted again, triggering calls to green_dealloc() - // in the correct thread (if we're not murdering). - // This may run arbitrary Python code and switch - // threads or greenlets! - Py_DECREF(to_del); - if (PyErr_Occurred()) { - PyErr_WriteUnraisable(nullptr); - PyErr_Clear(); - } - } - } - } - -public: - - /** - * Returns a new reference, or a false object. - */ - inline OwnedObject get_tracefunc() const - { - return tracefunc; - }; - - - inline void set_tracefunc(BorrowedObject tracefunc) - { - assert(tracefunc); - if (tracefunc == BorrowedObject(Py_None)) { - this->tracefunc.CLEAR(); - } - else { - this->tracefunc = tracefunc; - } - } - - /** - * Given a reference to a greenlet that some other thread - * attempted to delete (has a refcount of 0) store it for later - * deletion when the thread this state belongs to is current. - */ - inline void delete_when_thread_running(PyGreenlet* to_del) - { - Py_INCREF(to_del); - this->deleteme.push_back(to_del); - } - - /** - * Set to std::clock_t(-1) to disable. - */ - inline static std::clock_t& clocks_used_doing_gc() - { - return ThreadState::_clocks_used_doing_gc; - } - - ~ThreadState() - { - if (!PyInterpreterState_Head()) { - // We shouldn't get here (our callers protect us) - // but if we do, all we can do is bail early. - return; - } - - // We should not have an "origin" greenlet; that only exists - // for the temporary time during a switch, which should not - // be in progress as the thread dies. - //assert(!this->switching_state.origin); - - this->tracefunc.CLEAR(); - - // Forcibly GC as much as we can. - this->clear_deleteme_list(true); - - // The pending call did this. - assert(this->main_greenlet->thread_state() == nullptr); - - // If the main greenlet is the current greenlet, - // then we "fell off the end" and the thread died. 
-        // It's possible that there is some other greenlet that
-        // switched to us, leaving a reference to the main greenlet
-        // on the stack, somewhere uncollectible. Try to detect that.
-        if (this->current_greenlet == this->main_greenlet && this->current_greenlet) {
-            assert(this->current_greenlet->is_currently_running_in_some_thread());
-            // Drop one reference we hold.
-            this->current_greenlet.CLEAR();
-            assert(!this->current_greenlet);
-            // Only our reference to the main greenlet should be left,
-            // but hold onto the pointer in case we need to do extra cleanup.
-            PyGreenlet* old_main_greenlet = this->main_greenlet.borrow();
-            Py_ssize_t cnt = this->main_greenlet.REFCNT();
-            this->main_greenlet.CLEAR();
-            if (ThreadState::_clocks_used_doing_gc != std::clock_t(-1)
-                && cnt == 2 && Py_REFCNT(old_main_greenlet) == 1) {
-                // Highly likely that the reference is somewhere on
-                // the stack, not reachable by GC. Verify.
-                // XXX: This is O(n) in the total number of objects.
-                // TODO: Add a way to disable this at runtime, and
-                // another way to report on it.
-                std::clock_t begin = std::clock();
-                NewReference gc(PyImport_ImportModule("gc"));
-                if (gc) {
-                    OwnedObject get_referrers = gc.PyRequireAttr(ThreadState::get_referrers_name);
-                    OwnedList refs(get_referrers.PyCall(old_main_greenlet));
-                    if (refs && refs.empty()) {
-                        assert(refs.REFCNT() == 1);
-                        // We found nothing! So we left a dangling
-                        // reference: Probably the last thing some
-                        // other greenlet did was call
-                        // 'getcurrent().parent.switch()' to switch
-                        // back to us. Clean it up. This will be the
-                        // case on CPython 3.7 and newer, as they use
-                        // an internal calling convention that avoids
-                        // creating method objects and storing them on
-                        // the stack.
-                        Py_DECREF(old_main_greenlet);
-                    }
-                    else if (refs
-                             && refs.size() == 1
-                             && PyCFunction_Check(refs.at(0))
-                             && Py_REFCNT(refs.at(0)) == 2) {
-                        assert(refs.REFCNT() == 1);
-                        // OK, we found a C method that refers to the
-                        // main greenlet, and it's only referenced
-                        // twice: once in the list we just created,
-                        // once from...somewhere else. If we can't
-                        // find where else, then this is a leak.
-                        // This happens in older versions of CPython
-                        // that create a bound method object somewhere
-                        // on the stack that we'll never get back to.
-                        if (PyCFunction_GetFunction(refs.at(0).borrow()) == (PyCFunction)green_switch) {
-                            BorrowedObject function_w = refs.at(0);
-                            refs.clear(); // destroy the reference
-                                          // from the list.
-                            // back to one reference. Can *it* be
-                            // found?
-                            assert(function_w.REFCNT() == 1);
-                            refs = get_referrers.PyCall(function_w);
-                            if (refs && refs.empty()) {
-                                // Nope, it can't be found so it won't
-                                // ever be GC'd. Drop it.
-                                Py_CLEAR(function_w);
-                            }
-                        }
-                    }
-                    std::clock_t end = std::clock();
-                    ThreadState::_clocks_used_doing_gc += (end - begin);
-                }
-            }
-        }
-
-        // We need to make sure this greenlet appears to be dead,
-        // because otherwise deallocing it would fail to raise an
-        // exception in it (the thread is dead) and put it back in our
-        // deleteme list.
-        if (this->current_greenlet) {
-            this->current_greenlet->murder_in_place();
-            this->current_greenlet.CLEAR();
-        }
-
-        if (this->main_greenlet) {
-            // Couldn't have been the main greenlet that was running
-            // when the thread exited (because we already cleared this
-            // pointer if it was). This shouldn't be possible?
-
-            // If the main greenlet was current when the thread died (it
-            // should be, right?)
then we cleared its self pointer above - // when we cleared the current greenlet's main greenlet pointer. - // assert(this->main_greenlet->main_greenlet == this->main_greenlet - // || !this->main_greenlet->main_greenlet); - // // self reference, probably gone - // this->main_greenlet->main_greenlet.CLEAR(); - - // This will actually go away when the ivar is destructed. - this->main_greenlet.CLEAR(); - } - - if (PyErr_Occurred()) { - PyErr_WriteUnraisable(NULL); - PyErr_Clear(); - } - - } - -}; - -ImmortalString ThreadState::get_referrers_name(nullptr); -PythonAllocator ThreadState::allocator; -std::clock_t ThreadState::_clocks_used_doing_gc(0); - - - - - -}; // namespace greenlet - -#endif diff --git a/myenv/Lib/site-packages/greenlet/TThreadStateCreator.hpp b/myenv/Lib/site-packages/greenlet/TThreadStateCreator.hpp deleted file mode 100644 index 2ec7ab5..0000000 --- a/myenv/Lib/site-packages/greenlet/TThreadStateCreator.hpp +++ /dev/null @@ -1,102 +0,0 @@ -#ifndef GREENLET_THREAD_STATE_CREATOR_HPP -#define GREENLET_THREAD_STATE_CREATOR_HPP - -#include -#include - -#include "greenlet_internal.hpp" -#include "greenlet_refs.hpp" -#include "greenlet_thread_support.hpp" - -#include "TThreadState.hpp" - -namespace greenlet { - - -typedef void (*ThreadStateDestructor)(ThreadState* const); - -template -class ThreadStateCreator -{ -private: - // Initialized to 1, and, if still 1, created on access. - // Set to 0 on destruction. - ThreadState* _state; - G_NO_COPIES_OF_CLS(ThreadStateCreator); - - inline bool has_initialized_state() const noexcept - { - return this->_state != (ThreadState*)1; - } - - inline bool has_state() const noexcept - { - return this->has_initialized_state() && this->_state != nullptr; - } - -public: - - // Only one of these, auto created per thread. - // Constructing the state constructs the MainGreenlet. - ThreadStateCreator() : - _state((ThreadState*)1) - { - } - - ~ThreadStateCreator() - { - if (this->has_state()) { - Destructor(this->_state); - } - - this->_state = nullptr; - } - - inline ThreadState& state() - { - // The main greenlet will own this pointer when it is created, - // which will be right after this. The plan is to give every - // greenlet a pointer to the main greenlet for the thread it - // runs in; if we are doing something cross-thread, we need to - // access the pointer from the main greenlet. Deleting the - // thread, and hence the thread-local storage, will delete the - // state pointer in the main greenlet. - if (!this->has_initialized_state()) { - // XXX: Assuming allocation never fails - this->_state = new ThreadState; - // For non-standard threading, we need to store an object - // in the Python thread state dictionary so that it can be - // DECREF'd when the thread ends (ideally; the dict could - // last longer) and clean this object up. 
- } - if (!this->_state) { - throw std::runtime_error("Accessing state after destruction."); - } - return *this->_state; - } - - operator ThreadState&() - { - return this->state(); - } - - operator ThreadState*() - { - return &this->state(); - } - - inline int tp_traverse(visitproc visit, void* arg) - { - if (this->has_state()) { - return this->_state->tp_traverse(visit, arg); - } - return 0; - } - -}; - - - -}; // namespace greenlet - -#endif diff --git a/myenv/Lib/site-packages/greenlet/TThreadStateDestroy.cpp b/myenv/Lib/site-packages/greenlet/TThreadStateDestroy.cpp deleted file mode 100644 index 37fcc8c..0000000 --- a/myenv/Lib/site-packages/greenlet/TThreadStateDestroy.cpp +++ /dev/null @@ -1,258 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of the ThreadState destructors. - * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_THREADSTATE_DESTROY -#define T_THREADSTATE_DESTROY - -#include "TGreenlet.hpp" - -#include "greenlet_thread_support.hpp" -#include "greenlet_cpython_add_pending.hpp" -#include "greenlet_compiler_compat.hpp" -#include "TGreenletGlobals.cpp" -#include "TThreadState.hpp" -#include "TThreadStateCreator.hpp" - -namespace greenlet { - -extern "C" { - -struct ThreadState_DestroyNoGIL -{ - /** - This function uses the same lock that the PendingCallback does - */ - static void - MarkGreenletDeadAndQueueCleanup(ThreadState* const state) - { -#if GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK - return; -#endif - // We are *NOT* holding the GIL. Our thread is in the middle - // of its death throes and the Python thread state is already - // gone so we can't use most Python APIs. One that is safe is - // ``Py_AddPendingCall``, unless the interpreter itself has - // been torn down. There is a limited number of calls that can - // be queued: 32 (NPENDINGCALLS) in CPython 3.10, so we - // coalesce these calls using our own queue. - - if (!MarkGreenletDeadIfNeeded(state)) { - // No state, or no greenlet - return; - } - - // XXX: Because we don't have the GIL, this is a race condition. - if (!PyInterpreterState_Head()) { - // We have to leak the thread state, if the - // interpreter has shut down when we're getting - // deallocated, we can't run the cleanup code that - // deleting it would imply. - return; - } - - AddToCleanupQueue(state); - - } - -private: - - // If the state has an allocated main greenlet: - // - mark the greenlet as dead by disassociating it from the state; - // - return 1 - // Otherwise, return 0. - static bool - MarkGreenletDeadIfNeeded(ThreadState* const state) - { - if (state && state->has_main_greenlet()) { - // mark the thread as dead ASAP. - // this is racy! If we try to throw or switch to a - // greenlet from this thread from some other thread before - // we clear the state pointer, it won't realize the state - // is dead which can crash the process. 
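// ---- [editor's sketch, not part of the original diff] ----------------------
// The ThreadStateCreator earlier in this hunk uses a sentinel pointer value
// of 1 for "not created yet", nullptr for "destroyed", and anything else for
// "live". That lazy-init idiom, reduced to a self-contained example
// (`State` and `Creator` are hypothetical stand-ins):
#include <stdexcept>

struct State { /* ... */ };

class Creator {
    State* _state = reinterpret_cast<State*>(1);  // sentinel: uninitialized
public:
    State& state()
    {
        if (_state == reinterpret_cast<State*>(1)) {
            _state = new State;  // created on first access
        }
        if (!_state) {
            throw std::runtime_error("Accessing state after destruction.");
        }
        return *_state;
    }
    ~Creator()
    {
        if (_state != reinterpret_cast<State*>(1)) {
            delete _state;  // deleting nullptr is harmless
        }
        _state = nullptr;
    }
};
// ---- [end editor's sketch] -------------------------------------------------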
- PyGreenlet* p(state->borrow_main_greenlet().borrow()); - assert(p->pimpl->thread_state() == state || p->pimpl->thread_state() == nullptr); - dynamic_cast(p->pimpl)->thread_state(nullptr); - return true; - } - return false; - } - - static void - AddToCleanupQueue(ThreadState* const state) - { - assert(state && state->has_main_greenlet()); - - // NOTE: Because we're not holding the GIL here, some other - // Python thread could run and call ``os.fork()``, which would - // be bad if that happened while we are holding the cleanup - // lock (it wouldn't function in the child process). - // Make a best effort to try to keep the duration we hold the - // lock short. - // TODO: On platforms that support it, use ``pthread_atfork`` to - // drop this lock. - LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); - - mod_globs->queue_to_destroy(state); - if (mod_globs->thread_states_to_destroy.size() == 1) { - // We added the first item to the queue. We need to schedule - // the cleanup. - - // A size greater than 1 means that we have already added the pending call, - // and in fact, it may be executing now. - // If it is executing, our lock makes sure that it will see the item we just added - // to the queue on its next iteration (after we release the lock) - // - // A size of 1 means there is no pending call, OR the pending call is - // currently executing, has dropped the lock, and is deleting the last item - // from the queue; its next iteration will go ahead and delete the item we just added. - // And the pending call we schedule here will have no work to do. - int result = AddPendingCall( - PendingCallback_DestroyQueueWithGIL, - nullptr); - if (result < 0) { - // Hmm, what can we do here? - fprintf(stderr, - "greenlet: WARNING: failed in call to Py_AddPendingCall; " - "expect a memory leak.\n"); - } - } - } - - static int - PendingCallback_DestroyQueueWithGIL(void* UNUSED(arg)) - { - // We're holding the GIL here, so no Python code should be able to - // run to call ``os.fork()``. - while (1) { - ThreadState* to_destroy; - { - LockGuard cleanup_lock(*mod_globs->thread_states_to_destroy_lock); - if (mod_globs->thread_states_to_destroy.empty()) { - break; - } - to_destroy = mod_globs->take_next_to_destroy(); - } - assert(to_destroy); - assert(to_destroy->has_main_greenlet()); - // Drop the lock while we do the actual deletion. - // This allows other calls to MarkGreenletDeadAndQueueCleanup - // to enter and add to our queue. - DestroyOneWithGIL(to_destroy); - } - return 0; - } - - static void - DestroyOneWithGIL(const ThreadState* const state) - { - // Holding the GIL. - // Passed a non-shared pointer to the actual thread state. - // state -> main greenlet - assert(state->has_main_greenlet()); - PyGreenlet* main(state->borrow_main_greenlet()); - // When we need to do cross-thread operations, we check this. - // A NULL value means the thread died some time ago. - // We do this here, rather than in a Python dealloc function - // for the greenlet, in case there's still a reference out - // there. - dynamic_cast(main->pimpl)->thread_state(nullptr); - - delete state; // Deleting this runs the destructor, DECREFs the main greenlet. - } - - // ensure this is actually defined. 
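// ---- [editor's sketch, not part of the original diff] ----------------------
// The scheduling shape used above: a dying thread (holding no GIL) queues
// work, then registers a single pending call; the callback later drains the
// whole queue while holding the GIL. Locking and the queue itself are elided;
// the function names are hypothetical, but Py_AddPendingCall is the
// documented API with this signature.
static int drain_cleanup_queue(void* /*arg*/)
{
    // Runs later, with the GIL held.
    // ... pop and destroy every queued thread state ...
    return 0;
}

static void schedule_cleanup(void)
{
    if (Py_AddPendingCall(drain_cleanup_queue, NULL) < 0) {
        // Pending-call queue full or interpreter shutting down:
        // accept the leak, as the code above does.
    }
}
// ---- [end editor's sketch] -------------------------------------------------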
- static_assert(GREENLET_BROKEN_PY_ADD_PENDING == 1 || GREENLET_BROKEN_PY_ADD_PENDING == 0, - "GREENLET_BROKEN_PY_ADD_PENDING not defined correctly."); - -#if GREENLET_BROKEN_PY_ADD_PENDING - static int _push_pending_call(struct _pending_calls *pending, - int (*func)(void *), void *arg) - { - int i = pending->last; - int j = (i + 1) % NPENDINGCALLS; - if (j == pending->first) { - return -1; /* Queue full */ - } - pending->calls[i].func = func; - pending->calls[i].arg = arg; - pending->last = j; - return 0; - } - - static int AddPendingCall(int (*func)(void *), void *arg) - { - _PyRuntimeState *runtime = &_PyRuntime; - if (!runtime) { - // obviously impossible - return 0; - } - struct _pending_calls *pending = &runtime->ceval.pending; - if (!pending->lock) { - return 0; - } - int result = 0; - PyThread_acquire_lock(pending->lock, WAIT_LOCK); - if (!pending->finishing) { - result = _push_pending_call(pending, func, arg); - } - PyThread_release_lock(pending->lock); - SIGNAL_PENDING_CALLS(&runtime->ceval); - return result; - } -#else - // Python < 3.8 or >= 3.9 - static int AddPendingCall(int (*func)(void*), void* arg) - { - // If the interpreter is in the middle of finalizing, we can't add a - // pending call. Trying to do so will end up in a SIGSEGV, as - // Py_AddPendingCall will not be able to get the interpreter and will - // try to dereference a NULL pointer. It's possible this can still - // segfault if we happen to get context switched, and maybe we should - // just always implement our own AddPendingCall, but I'd like to see if - // this works first -#if GREENLET_PY313 - if (Py_IsFinalizing()) { -#else - if (_Py_IsFinalizing()) { -#endif -#ifdef GREENLET_DEBUG - // No need to log in the general case. Yes, we'll leak, - // but we're shutting down so it should be ok. - fprintf(stderr, - "greenlet: WARNING: Interpreter is finalizing. Ignoring " - "call to Py_AddPendingCall; \n"); -#endif - return 0; - } - return Py_AddPendingCall(func, arg); - } -#endif - - - - -}; -}; - -}; // namespace greenlet - -// The intent when GET_THREAD_STATE() is needed multiple times in a -// function is to take a reference to its return value in a local -// variable, to avoid the thread-local indirection. On some platforms -// (macOS), accessing a thread-local involves a function call (plus an -// initial function call in each function that uses a thread local); -// in contrast, static volatile variables are at some pre-computed -// offset. -typedef greenlet::ThreadStateCreator ThreadStateCreator; -static thread_local ThreadStateCreator g_thread_state_global; -#define GET_THREAD_STATE() g_thread_state_global - -#endif //T_THREADSTATE_DESTROY diff --git a/myenv/Lib/site-packages/greenlet/TUserGreenlet.cpp b/myenv/Lib/site-packages/greenlet/TUserGreenlet.cpp deleted file mode 100644 index 73a8133..0000000 --- a/myenv/Lib/site-packages/greenlet/TUserGreenlet.cpp +++ /dev/null @@ -1,662 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/** - * Implementation of greenlet::UserGreenlet. 
- * - * Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#ifndef T_USER_GREENLET_CPP -#define T_USER_GREENLET_CPP - -#include "greenlet_internal.hpp" -#include "TGreenlet.hpp" - -#include "TThreadStateDestroy.cpp" - - -namespace greenlet { -using greenlet::refs::BorrowedMainGreenlet; -greenlet::PythonAllocator UserGreenlet::allocator; - -void* UserGreenlet::operator new(size_t UNUSED(count)) -{ - return allocator.allocate(1); -} - - -void UserGreenlet::operator delete(void* ptr) -{ - return allocator.deallocate(static_cast(ptr), - 1); -} - - -UserGreenlet::UserGreenlet(PyGreenlet* p, BorrowedGreenlet the_parent) - : Greenlet(p), _parent(the_parent) -{ -} - -UserGreenlet::~UserGreenlet() -{ - // Python 3.11: If we don't clear out the raw frame datastack - // when deleting an unfinished greenlet, - // TestLeaks.test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main fails. - this->python_state.did_finish(nullptr); - this->tp_clear(); -} - - -const BorrowedMainGreenlet -UserGreenlet::main_greenlet() const -{ - return this->_main_greenlet; -} - - -BorrowedMainGreenlet -UserGreenlet::find_main_greenlet_in_lineage() const -{ - if (this->started()) { - assert(this->_main_greenlet); - return BorrowedMainGreenlet(this->_main_greenlet); - } - - if (!this->_parent) { - /* garbage collected greenlet in chain */ - // XXX: WHAT? - return BorrowedMainGreenlet(nullptr); - } - - return this->_parent->find_main_greenlet_in_lineage(); -} - - -/** - * CAUTION: This will allocate memory and may trigger garbage - * collection and arbitrary Python code. - */ -OwnedObject -UserGreenlet::throw_GreenletExit_during_dealloc(const ThreadState& current_thread_state) -{ - /* The dying greenlet cannot be a parent of ts_current - because the 'parent' field chain would hold a - reference */ - UserGreenlet::ParentIsCurrentGuard with_current_parent(this, current_thread_state); - - // We don't care about the return value, only whether an - // exception happened. Whether or not an exception happens, - // we need to restore the parent in case the greenlet gets - // resurrected. - return Greenlet::throw_GreenletExit_during_dealloc(current_thread_state); -} - -ThreadState* -UserGreenlet::thread_state() const noexcept -{ - // TODO: maybe make this throw, if the thread state isn't there? - // if (!this->main_greenlet) { - // throw std::runtime_error("No thread state"); // TODO: Better exception - // } - if (!this->_main_greenlet) { - return nullptr; - } - return this->_main_greenlet->thread_state(); -} - - -bool -UserGreenlet::was_running_in_dead_thread() const noexcept -{ - return this->_main_greenlet && !this->thread_state(); -} - -OwnedObject -UserGreenlet::g_switch() -{ - assert(this->args() || PyErr_Occurred()); - - try { - this->check_switch_allowed(); - } - catch (const PyErrOccurred&) { - this->release_args(); - throw; - } - - // Switching greenlets used to attempt to clean out ones that need - // deleted *if* we detected a thread switch. Should it still do - // that? - // An issue is that if we delete a greenlet from another thread, - // it gets queued to this thread, and ``kill_greenlet()`` switches - // back into the greenlet - - /* find the real target by ignoring dead greenlets, - and if necessary starting a greenlet. 
*/ - switchstack_result_t err; - Greenlet* target = this; - // TODO: probably cleaner to handle the case where we do - // switch to ourself separately from the other cases. - // This can probably even further be simplified if we keep - // track of the switching_state we're going for and just call - // into g_switch() if it's not ourself. The main problem with that - // is that we would be using more stack space. - bool target_was_me = true; - bool was_initial_stub = false; - while (target) { - if (target->active()) { - if (!target_was_me) { - target->args() <<= this->args(); - assert(!this->args()); - } - err = target->g_switchstack(); - break; - } - if (!target->started()) { - // We never encounter a main greenlet that's not started. - assert(!target->main()); - UserGreenlet* real_target = static_cast(target); - assert(real_target); - void* dummymarker; - was_initial_stub = true; - if (!target_was_me) { - target->args() <<= this->args(); - assert(!this->args()); - } - try { - // This can only throw back to us while we're - // still in this greenlet. Once the new greenlet - // is bootstrapped, it has its own exception state. - err = real_target->g_initialstub(&dummymarker); - } - catch (const PyErrOccurred&) { - this->release_args(); - throw; - } - catch (const GreenletStartedWhileInPython&) { - // The greenlet was started sometime before this - // greenlet actually switched to it, i.e., - // "concurrent" calls to switch() or throw(). - // We need to retry the switch. - // Note that the current greenlet has been reset - // to this one (or we wouldn't be running!) - continue; - } - break; - } - - target = target->parent(); - target_was_me = false; - } - // The ``this`` pointer and all other stack or register based - // variables are invalid now, at least where things succeed - // above. - // But this one, probably not so much? It's not clear if it's - // safe to throw an exception at this point. - - if (err.status < 0) { - // If we get here, either g_initialstub() - // failed, or g_switchstack() failed. Either one of those - // cases SHOULD leave us in the original greenlet with a valid - // stack. - return this->on_switchstack_or_initialstub_failure(target, err, target_was_me, was_initial_stub); - } - - // err.the_new_current_greenlet would be the same as ``target``, - // if target wasn't probably corrupt. - return err.the_new_current_greenlet->g_switch_finish(err); -} - - - -Greenlet::switchstack_result_t -UserGreenlet::g_initialstub(void* mark) -{ - OwnedObject run; - - // We need to grab a reference to the current switch arguments - // in case we're entered concurrently during the call to - // GetAttr() and have to try again. - // We'll restore them when we return in that case. - // Scope them tightly to avoid ref leaks. - { - SwitchingArgs args(this->args()); - - /* save exception in case getattr clears it */ - PyErrPieces saved; - - /* - self.run is the object to call in the new greenlet. - This could run arbitrary python code and switch greenlets! - */ - run = this->self().PyRequireAttr(mod_globs->str_run); - /* restore saved exception */ - saved.PyErrRestore(); - - - /* recheck that it's safe to switch in case greenlet reparented anywhere above */ - this->check_switch_allowed(); - - /* by the time we got here another start could happen elsewhere, - * that means it should now be a regular switch. 
- * This can happen if the Python code is a subclass that implements - * __getattribute__ or __getattr__, or makes ``run`` a descriptor; - * all of those can run arbitrary code that switches back into - * this greenlet. - */ - if (this->stack_state.started()) { - // the successful switch cleared these out, we need to - // restore our version. They will be copied on up to the - // next target. - assert(!this->args()); - this->args() <<= args; - throw GreenletStartedWhileInPython(); - } - } - - // Sweet, if we got here, we have the go-ahead and will switch - // greenlets. - // Nothing we do from here on out should allow for a thread or - // greenlet switch: No arbitrary calls to Python, including - // decref'ing - -#if GREENLET_USE_CFRAME - /* OK, we need it, we're about to switch greenlets, save the state. */ - /* - See green_new(). This is a stack-allocated variable used - while *self* is in PyObject_Call(). - We want to defer copying the state info until we're sure - we need it and are in a stable place to do so. - */ - _PyCFrame trace_info; - - this->python_state.set_new_cframe(trace_info); -#endif - /* start the greenlet */ - ThreadState& thread_state = GET_THREAD_STATE().state(); - this->stack_state = StackState(mark, - thread_state.borrow_current()->stack_state); - this->python_state.set_initial_state(PyThreadState_GET()); - this->exception_state.clear(); - this->_main_greenlet = thread_state.get_main_greenlet(); - - /* perform the initial switch */ - switchstack_result_t err = this->g_switchstack(); - /* returns twice! - The 1st time with ``err == 1``: we are in the new greenlet. - This one owns a greenlet that used to be current. - The 2nd time with ``err <= 0``: back in the caller's - greenlet; this happens if the child finishes or switches - explicitly to us. Either way, the ``err`` variable is - created twice at the same memory location, but possibly - having different ``origin`` values. Note that it's not - constructed for the second time until the switch actually happens. - */ - if (err.status == 1) { - // In the new greenlet. - - // This never returns! Calling inner_bootstrap steals - // the contents of our run object within this stack frame, so - // it is not valid to do anything with it. - try { - this->inner_bootstrap(err.origin_greenlet.relinquish_ownership(), - run.relinquish_ownership()); - } - // Getting a C++ exception here isn't good. It's probably a - // bug in the underlying greenlet, meaning it's probably a - // C++ extension. We're going to abort anyway, but try to - // display some nice information *if* possible. Some obscure - // platforms don't properly support this (old 32-bit Arm, see see - // https://github.com/python-greenlet/greenlet/issues/385); that's not - // great, but should usually be OK because, as mentioned above, we're - // terminating anyway. - // - // The catching is tested by - // ``test_cpp.CPPTests.test_unhandled_exception_in_greenlet_aborts``. - // - // PyErrOccurred can theoretically be thrown by - // inner_bootstrap() -> g_switch_finish(), but that should - // never make it back to here. It is a std::exception and - // would be caught if it is. - catch (const std::exception& e) { - std::string base = "greenlet: Unhandled C++ exception: "; - base += e.what(); - Py_FatalError(base.c_str()); - } - catch (...) { - // Some compilers/runtimes use exceptions internally. - // It appears that GCC on Linux with libstdc++ throws an - // exception internally at process shutdown time to unwind - // stacks and clean up resources. 
Depending on exactly - // where we are when the process exits, that could result - // in an unknown exception getting here. If we - // Py_FatalError() or abort() here, we interfere with - // orderly process shutdown. Throwing the exception on up - // is the right thing to do. - // - // gevent's ``examples/dns_mass_resolve.py`` demonstrates this. -#ifndef NDEBUG - fprintf(stderr, - "greenlet: inner_bootstrap threw unknown exception; " - "is the process terminating?\n"); -#endif - throw; - } - Py_FatalError("greenlet: inner_bootstrap returned with no exception.\n"); - } - - - // In contrast, notice that we're keeping the origin greenlet - // around as an owned reference; we need it to call the trace - // function for the switch back into the parent. It was only - // captured at the time the switch actually happened, though, - // so we haven't been keeping an extra reference around this - // whole time. - - /* back in the parent */ - if (err.status < 0) { - /* start failed badly, restore greenlet state */ - this->stack_state = StackState(); - this->_main_greenlet.CLEAR(); - // CAUTION: This may run arbitrary Python code. - run.CLEAR(); // inner_bootstrap didn't run, we own the reference. - } - - // In the success case, the spawned code (inner_bootstrap) will - // take care of decrefing this, so we relinquish ownership so as - // to not double-decref. - - run.relinquish_ownership(); - - return err; -} - - -void -UserGreenlet::inner_bootstrap(PyGreenlet* origin_greenlet, PyObject* run) -{ - // The arguments here would be another great place for move. - // As it is, we take them as a reference so that when we clear - // them we clear what's on the stack above us. Do that NOW, and - // without using a C++ RAII object, - // so there's no way that exiting the parent frame can clear it, - // or we clear it unexpectedly. This arises in the context of the - // interpreter shutting down. See https://github.com/python-greenlet/greenlet/issues/325 - //PyObject* run = _run.relinquish_ownership(); - - /* in the new greenlet */ - assert(this->thread_state()->borrow_current() == BorrowedGreenlet(this->_self)); - // C++ exceptions cannot propagate to the parent greenlet from - // here. (TODO: Do we need a catch(...) clause, perhaps on the - // function itself? ALl we could do is terminate the program.) - // NOTE: On 32-bit Windows, the call chain is extremely - // important here in ways that are subtle, having to do with - // the depth of the SEH list. The call to restore it MUST NOT - // add a new SEH handler to the list, or we'll restore it to - // the wrong thing. - this->thread_state()->restore_exception_state(); - /* stack variables from above are no good and also will not unwind! */ - // EXCEPT: That can't be true, we access run, among others, here. - - this->stack_state.set_active(); /* running */ - - // We're about to possibly run Python code again, which - // could switch back/away to/from us, so we need to grab the - // arguments locally. - SwitchingArgs args; - args <<= this->args(); - assert(!this->args()); - - // XXX: We could clear this much earlier, right? - // Or would that introduce the possibility of running Python - // code when we don't want to? - // CAUTION: This may run arbitrary Python code. - this->_run_callable.CLEAR(); - - - // The first switch we need to manually call the trace - // function here instead of in g_switch_finish, because we - // never return there. 
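// [Editor's note: illustrative aside, not part of the vendored diff.]
// The block just below looks up the thread's trace callable and invokes it
// by hand for this first switch. Conceptually the invocation reduces to the
// sketch here, assuming only the documented trace signature
// callback(event, (origin, target)); the helper name is hypothetical.
static int call_trace_sketch(PyObject* tracefunc, const char* event,
                             PyObject* origin, PyObject* target)
{
    // "s(OO)" builds the two arguments: the event string and the
    // (origin, target) tuple, i.e. callback(event, (origin, target)).
    PyObject* r = PyObject_CallFunction(tracefunc, "s(OO)",
                                        event, origin, target);
    if (!r) {
        return -1; // exception is set; the real code turns it into a throw
    }
    Py_DECREF(r);
    return 0;
}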
- if (OwnedObject tracefunc = this->thread_state()->get_tracefunc()) { - OwnedGreenlet trace_origin; - trace_origin = origin_greenlet; - try { - g_calltrace(tracefunc, - args ? mod_globs->event_switch : mod_globs->event_throw, - trace_origin, - this->_self); - } - catch (const PyErrOccurred&) { - /* Turn trace errors into switch throws */ - args.CLEAR(); - } - } - - // We no longer need the origin, it was only here for - // tracing. - // We may never actually exit this stack frame so we need - // to explicitly clear it. - // This could run Python code and switch. - Py_CLEAR(origin_greenlet); - - OwnedObject result; - if (!args) { - /* pending exception */ - result = NULL; - } - else { - /* call g.run(*args, **kwargs) */ - // This could result in further switches - try { - //result = run.PyCall(args.args(), args.kwargs()); - // CAUTION: Just invoking this, before the function even - // runs, may cause memory allocations, which may trigger - // GC, which may run arbitrary Python code. - result = OwnedObject::consuming(PyObject_Call(run, args.args().borrow(), args.kwargs().borrow())); - } - catch (...) { - // Unhandled C++ exception! - - // If we declare ourselves as noexcept, if we don't catch - // this here, most platforms will just abort() the - // process. But on 64-bit Windows with older versions of - // the C runtime, this can actually corrupt memory and - // just return. We see this when compiling with the - // Windows 7.0 SDK targeting Windows Server 2008, but not - // when using the Appveyor Visual Studio 2019 image. So - // this currently only affects Python 2.7 on Windows 64. - // That is, the tests pass and the runtime aborts - // everywhere else. - // - // However, if we catch it and try to continue with a - // Python error, then all Windows 64 bit platforms corrupt - // memory. So all we can do is manually abort, hopefully - // with a good error message. (Note that the above was - // tested WITHOUT the `/EHr` switch being used at compile - // time, so MSVC may have "optimized" out important - // checking. Using that switch, we may be in a better - // place in terms of memory corruption.) But sometimes it - // can't be caught here at all, which is confusing but not - // terribly surprising; so again, the G_NOEXCEPT_WIN32 - // plus "/EHr". - // - // Hopefully the basic C stdlib is still functional enough - // for us to at least print an error. - // - // It gets more complicated than that, though, on some - // platforms, specifically at least Linux/gcc/libstdc++. They use - // an exception to unwind the stack when a background - // thread exits. (See comments about noexcept.) So this - // may not actually represent anything untoward. On those - // platforms we allow throws of this to propagate, or - // attempt to anyway. -# if defined(WIN32) || defined(_WIN32) - Py_FatalError( - "greenlet: Unhandled C++ exception from a greenlet run function. " - "Because memory is likely corrupted, terminating process."); - std::abort(); -#else - throw; -#endif - } - } - // These lines may run arbitrary code - args.CLEAR(); - Py_CLEAR(run); - - if (!result - && mod_globs->PyExc_GreenletExit.PyExceptionMatches() - && (this->args())) { - // This can happen, for example, if our only reference - // goes away after we switch back to the parent. 
- // See test_dealloc_switch_args_not_lost - PyErrPieces clear_error; - result <<= this->args(); - result = single_result(result); - } - this->release_args(); - this->python_state.did_finish(PyThreadState_GET()); - - result = g_handle_exit(result); - assert(this->thread_state()->borrow_current() == this->_self); - - /* jump back to parent */ - this->stack_state.set_inactive(); /* dead */ - - - // TODO: Can we decref some things here? Release our main greenlet - // and maybe parent? - for (Greenlet* parent = this->_parent; - parent; - parent = parent->parent()) { - // We need to somewhere consume a reference to - // the result; in most cases we'll never have control - // back in this stack frame again. Calling - // green_switch actually adds another reference! - // This would probably be clearer with a specific API - // to hand results to the parent. - parent->args() <<= result; - assert(!result); - // The parent greenlet now owns the result; in the - // typical case we'll never get back here to assign to - // result and thus release the reference. - try { - result = parent->g_switch(); - } - catch (const PyErrOccurred&) { - // Ignore, keep passing the error on up. - } - - /* Return here means switch to parent failed, - * in which case we throw *current* exception - * to the next parent in chain. - */ - assert(!result); - } - /* We ran out of parents, cannot continue */ - PyErr_WriteUnraisable(this->self().borrow_o()); - Py_FatalError("greenlet: ran out of parent greenlets while propagating exception; " - "cannot continue"); - std::abort(); -} - -void -UserGreenlet::run(const BorrowedObject nrun) -{ - if (this->started()) { - throw AttributeError( - "run cannot be set " - "after the start of the greenlet"); - } - this->_run_callable = nrun; -} - -const OwnedGreenlet -UserGreenlet::parent() const -{ - return this->_parent; -} - -void -UserGreenlet::parent(const BorrowedObject raw_new_parent) -{ - if (!raw_new_parent) { - throw AttributeError("can't delete attribute"); - } - - BorrowedMainGreenlet main_greenlet_of_new_parent; - BorrowedGreenlet new_parent(raw_new_parent.borrow()); // could - // throw - // TypeError! 
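// [Editor's note: illustrative aside, not part of the vendored diff.]
// The loop that follows validates a proposed parent by walking up its
// parent chain; reaching ``this`` means the assignment would make the
// chain circular. The same check over a hypothetical minimal node type:
struct ChainNode { ChainNode* parent; };

static bool would_create_cycle(const ChainNode* self,
                               const ChainNode* new_parent)
{
    // Walk up from the proposed parent; finding ``self`` in the chain
    // means ``self`` would become its own ancestor.
    for (const ChainNode* p = new_parent; p; p = p->parent) {
        if (p == self) {
            return true; // the setter raises ValueError("cyclic parent chain")
        }
    }
    return false;
}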
- for (BorrowedGreenlet p = new_parent; p; p = p->parent()) { - if (p == this->self()) { - throw ValueError("cyclic parent chain"); - } - main_greenlet_of_new_parent = p->main_greenlet(); - } - - if (!main_greenlet_of_new_parent) { - throw ValueError("parent must not be garbage collected"); - } - - if (this->started() - && this->_main_greenlet != main_greenlet_of_new_parent) { - throw ValueError("parent cannot be on a different thread"); - } - - this->_parent = new_parent; -} - -void -UserGreenlet::murder_in_place() -{ - this->_main_greenlet.CLEAR(); - Greenlet::murder_in_place(); -} - -bool -UserGreenlet::belongs_to_thread(const ThreadState* thread_state) const -{ - return Greenlet::belongs_to_thread(thread_state) && this->_main_greenlet == thread_state->borrow_main_greenlet(); -} - - -int -UserGreenlet::tp_traverse(visitproc visit, void* arg) -{ - Py_VISIT(this->_parent.borrow_o()); - Py_VISIT(this->_main_greenlet.borrow_o()); - Py_VISIT(this->_run_callable.borrow_o()); - - return Greenlet::tp_traverse(visit, arg); -} - -int -UserGreenlet::tp_clear() -{ - Greenlet::tp_clear(); - this->_parent.CLEAR(); - this->_main_greenlet.CLEAR(); - this->_run_callable.CLEAR(); - return 0; -} - -UserGreenlet::ParentIsCurrentGuard::ParentIsCurrentGuard(UserGreenlet* p, - const ThreadState& thread_state) - : oldparent(p->_parent), - greenlet(p) -{ - p->_parent = thread_state.get_current(); -} - -UserGreenlet::ParentIsCurrentGuard::~ParentIsCurrentGuard() -{ - this->greenlet->_parent = oldparent; - oldparent.CLEAR(); -} - -}; //namespace greenlet -#endif diff --git a/myenv/Lib/site-packages/greenlet/__init__.py b/myenv/Lib/site-packages/greenlet/__init__.py deleted file mode 100644 index b2dcc9b..0000000 --- a/myenv/Lib/site-packages/greenlet/__init__.py +++ /dev/null @@ -1,71 +0,0 @@ -# -*- coding: utf-8 -*- -""" -The root of the greenlet package. -""" -from __future__ import absolute_import -from __future__ import division -from __future__ import print_function - -__all__ = [ - '__version__', - '_C_API', - - 'GreenletExit', - 'error', - - 'getcurrent', - 'greenlet', - - 'gettrace', - 'settrace', -] - -# pylint:disable=no-name-in-module - -### -# Metadata -### -__version__ = '3.1.1' -from ._greenlet import _C_API # pylint:disable=no-name-in-module - -### -# Exceptions -### -from ._greenlet import GreenletExit -from ._greenlet import error - -### -# greenlets -### -from ._greenlet import getcurrent -from ._greenlet import greenlet - -### -# tracing -### -try: - from ._greenlet import gettrace - from ._greenlet import settrace -except ImportError: - # Tracing wasn't supported. - # XXX: The option to disable it was removed in 1.0, - # so this branch should be dead code. - pass - -### -# Constants -# These constants aren't documented and aren't recommended. -# In 1.0, USE_GC and USE_TRACING are always true, and USE_CONTEXT_VARS -# is the same as ``sys.version_info[:2] >= 3.7`` -### -from ._greenlet import GREENLET_USE_CONTEXT_VARS # pylint:disable=unused-import -from ._greenlet import GREENLET_USE_GC # pylint:disable=unused-import -from ._greenlet import GREENLET_USE_TRACING # pylint:disable=unused-import - -# Controlling the use of the gc module. Provisional API for this greenlet -# implementation in 2.0. -from ._greenlet import CLOCKS_PER_SEC # pylint:disable=unused-import -from ._greenlet import enable_optional_cleanup # pylint:disable=unused-import -from ._greenlet import get_clocks_used_doing_optional_cleanup # pylint:disable=unused-import - -# Other APIS in the _greenlet module are for test support. 
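Editor's note: the package `__init__.py` deleted above re-exports `_C_API`, the capsule that C extensions consume through the `greenlet.h` header deleted further below. A minimal sketch of driving that C API from an extension, assuming `PyGreenlet_Import()` was already called in the module's init function; `switch_to_child` is an illustrative name and error handling is elided:

#include <Python.h>
#include "greenlet.h"

/* Spawn a child greenlet around run_callable and switch to it once.
   PyGreenlet_GetCurrent() and PyGreenlet_New() both return new
   references; per the upstream sources, NULL args/kwargs to
   PyGreenlet_Switch() are treated as an empty call. */
static PyObject*
switch_to_child(PyObject* run_callable)
{
    PyGreenlet* current = PyGreenlet_GetCurrent();
    PyGreenlet* child = PyGreenlet_New(run_callable, current);
    PyObject* result = PyGreenlet_Switch(child, NULL, NULL);
    Py_DECREF(child);
    Py_DECREF(current);
    return result; /* NULL with an exception set on failure */
}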
diff --git a/myenv/Lib/site-packages/greenlet/_greenlet.cp312-win_amd64.pyd b/myenv/Lib/site-packages/greenlet/_greenlet.cp312-win_amd64.pyd deleted file mode 100644 index 435a41a..0000000 Binary files a/myenv/Lib/site-packages/greenlet/_greenlet.cp312-win_amd64.pyd and /dev/null differ diff --git a/myenv/Lib/site-packages/greenlet/greenlet.cpp b/myenv/Lib/site-packages/greenlet/greenlet.cpp deleted file mode 100644 index e8d92a0..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet.cpp +++ /dev/null @@ -1,320 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -/* Format with: - * clang-format -i --style=file src/greenlet/greenlet.c - * - * - * Fix missing braces with: - * clang-tidy src/greenlet/greenlet.c -fix -checks="readability-braces-around-statements" -*/ -#include -#include -#include -#include - - -#define PY_SSIZE_T_CLEAN -#include -#include "structmember.h" // PyMemberDef - -#include "greenlet_internal.hpp" -// Code after this point can assume access to things declared in stdint.h, -// including the fixed-width types. This goes for the platform-specific switch functions -// as well. -#include "greenlet_refs.hpp" -#include "greenlet_slp_switch.hpp" - -#include "greenlet_thread_support.hpp" -#include "TGreenlet.hpp" - -#include "TGreenletGlobals.cpp" - -#include "TGreenlet.cpp" -#include "TMainGreenlet.cpp" -#include "TUserGreenlet.cpp" -#include "TBrokenGreenlet.cpp" -#include "TExceptionState.cpp" -#include "TPythonState.cpp" -#include "TStackState.cpp" - -#include "TThreadState.hpp" -#include "TThreadStateCreator.hpp" -#include "TThreadStateDestroy.cpp" - -#include "PyGreenlet.cpp" -#include "PyGreenletUnswitchable.cpp" -#include "CObjects.cpp" - -using greenlet::LockGuard; -using greenlet::LockInitError; -using greenlet::PyErrOccurred; -using greenlet::Require; - -using greenlet::g_handle_exit; -using greenlet::single_result; - -using greenlet::Greenlet; -using greenlet::UserGreenlet; -using greenlet::MainGreenlet; -using greenlet::BrokenGreenlet; -using greenlet::ThreadState; -using greenlet::PythonState; - - - -// ******* Implementation of things from included files -template -greenlet::refs::_BorrowedGreenlet& greenlet::refs::_BorrowedGreenlet::operator=(const greenlet::refs::BorrowedObject& other) -{ - this->_set_raw_pointer(static_cast(other)); - return *this; -} - -template -inline greenlet::refs::_BorrowedGreenlet::operator Greenlet*() const noexcept -{ - if (!this->p) { - return nullptr; - } - return reinterpret_cast(this->p)->pimpl; -} - -template -greenlet::refs::_BorrowedGreenlet::_BorrowedGreenlet(const BorrowedObject& p) - : BorrowedReference(nullptr) -{ - - this->_set_raw_pointer(p.borrow()); -} - -template -inline greenlet::refs::_OwnedGreenlet::operator Greenlet*() const noexcept -{ - if (!this->p) { - return nullptr; - } - return reinterpret_cast(this->p)->pimpl; -} - - - -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wmissing-field-initializers" -# pragma clang diagnostic ignored "-Wwritable-strings" -#elif defined(__GNUC__) -# pragma GCC diagnostic push -// warning: ISO C++ forbids converting a string constant to ‘char*’ -// (The python APIs aren't const correct and accept writable char*) -# pragma GCC diagnostic ignored "-Wwrite-strings" -#endif - - -/*********************************************************** - -A PyGreenlet is a range of C stack addresses that must be -saved and restored in such a way that the full range of the -stack contains valid data when we switch to it. 
- -Stack layout for a greenlet: - - | ^^^ | - | older data | - | | - stack_stop . |_______________| - . | | - . | greenlet data | - . | in stack | - . * |_______________| . . _____________ stack_copy + stack_saved - . | | | | - . | data | |greenlet data| - . | unrelated | | saved | - . | to | | in heap | - stack_start . | this | . . |_____________| stack_copy - | greenlet | - | | - | newer data | - | vvv | - - -Note that a greenlet's stack data is typically partly at its correct -place in the stack, and partly saved away in the heap, but always in -the above configuration: two blocks, the more recent one in the heap -and the older one still in the stack (either block may be empty). - -Greenlets are chained: each points to the previous greenlet, which is -the one that owns the data currently in the C stack above my -stack_stop. The currently running greenlet is the first element of -this chain. The main (initial) greenlet is the last one. Greenlets -whose stack is entirely in the heap can be skipped from the chain. - -The chain is not related to execution order, but only to the order -in which bits of C stack happen to belong to greenlets at a particular -point in time. - -The main greenlet doesn't have a stack_stop: it is responsible for the -complete rest of the C stack, and we don't know where it begins. We -use (char*) -1, the largest possible address. - -States: - stack_stop == NULL && stack_start == NULL: did not start yet - stack_stop != NULL && stack_start == NULL: already finished - stack_stop != NULL && stack_start != NULL: active - -The running greenlet's stack_start is undefined but not NULL. - - ***********************************************************/ - - - - -/***********************************************************/ - -/* Some functions must not be inlined: - * slp_restore_state, when inlined into slp_switch might cause - it to restore stack over its own local variables - * slp_save_state, when inlined would add its own local - variables to the saved stack, wasting space - * slp_switch, cannot be inlined for obvious reasons - * g_initialstub, when inlined would receive a pointer into its - own stack frame, leading to incomplete stack save/restore - -g_initialstub is a member function and declared virtual so that the -compiler always calls it through a vtable. - -slp_save_state and slp_restore_state are also member functions. They -are called from trampoline functions that themselves are declared as -not eligible for inlining. 
-*/ - -extern "C" { -static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref) -{ - return switching_thread_state->slp_save_state(stackref); -} -static void GREENLET_NOINLINE(slp_restore_state_trampoline)() -{ - switching_thread_state->slp_restore_state(); -} -} - - -/***********************************************************/ - - -#include "PyModule.cpp" - - - -static PyObject* -greenlet_internal_mod_init() noexcept -{ - static void* _PyGreenlet_API[PyGreenlet_API_pointers]; - - try { - CreatedModule m(greenlet_module_def); - - Require(PyType_Ready(&PyGreenlet_Type)); - Require(PyType_Ready(&PyGreenletUnswitchable_Type)); - - mod_globs = new greenlet::GreenletGlobals; - ThreadState::init(); - - m.PyAddObject("greenlet", PyGreenlet_Type); - m.PyAddObject("UnswitchableGreenlet", PyGreenletUnswitchable_Type); - m.PyAddObject("error", mod_globs->PyExc_GreenletError); - m.PyAddObject("GreenletExit", mod_globs->PyExc_GreenletExit); - - m.PyAddObject("GREENLET_USE_GC", 1); - m.PyAddObject("GREENLET_USE_TRACING", 1); - m.PyAddObject("GREENLET_USE_CONTEXT_VARS", 1L); - m.PyAddObject("GREENLET_USE_STANDARD_THREADING", 1L); - - OwnedObject clocks_per_sec = OwnedObject::consuming(PyLong_FromSsize_t(CLOCKS_PER_SEC)); - m.PyAddObject("CLOCKS_PER_SEC", clocks_per_sec); - - /* also publish module-level data as attributes of the greentype. */ - // XXX: This is weird, and enables a strange pattern of - // confusing the class greenlet with the module greenlet; with - // the exception of (possibly) ``getcurrent()``, this - // shouldn't be encouraged so don't add new items here. - for (const char* const* p = copy_on_greentype; *p; p++) { - OwnedObject o = m.PyRequireAttr(*p); - PyDict_SetItemString(PyGreenlet_Type.tp_dict, *p, o.borrow()); - } - - /* - * Expose C API - */ - - /* types */ - _PyGreenlet_API[PyGreenlet_Type_NUM] = (void*)&PyGreenlet_Type; - - /* exceptions */ - _PyGreenlet_API[PyExc_GreenletError_NUM] = (void*)mod_globs->PyExc_GreenletError; - _PyGreenlet_API[PyExc_GreenletExit_NUM] = (void*)mod_globs->PyExc_GreenletExit; - - /* methods */ - _PyGreenlet_API[PyGreenlet_New_NUM] = (void*)PyGreenlet_New; - _PyGreenlet_API[PyGreenlet_GetCurrent_NUM] = (void*)PyGreenlet_GetCurrent; - _PyGreenlet_API[PyGreenlet_Throw_NUM] = (void*)PyGreenlet_Throw; - _PyGreenlet_API[PyGreenlet_Switch_NUM] = (void*)PyGreenlet_Switch; - _PyGreenlet_API[PyGreenlet_SetParent_NUM] = (void*)PyGreenlet_SetParent; - - /* Previously macros, but now need to be functions externally. */ - _PyGreenlet_API[PyGreenlet_MAIN_NUM] = (void*)Extern_PyGreenlet_MAIN; - _PyGreenlet_API[PyGreenlet_STARTED_NUM] = (void*)Extern_PyGreenlet_STARTED; - _PyGreenlet_API[PyGreenlet_ACTIVE_NUM] = (void*)Extern_PyGreenlet_ACTIVE; - _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM] = (void*)Extern_PyGreenlet_GET_PARENT; - - /* XXX: Note that our module name is ``greenlet._greenlet``, but for - backwards compatibility with existing C code, we need the _C_API to - be directly in greenlet. 
- */ - const NewReference c_api_object(Require( - PyCapsule_New( - (void*)_PyGreenlet_API, - "greenlet._C_API", - NULL))); - m.PyAddObject("_C_API", c_api_object); - assert(c_api_object.REFCNT() == 2); - - // cerr << "Sizes:" - // << "\n\tGreenlet : " << sizeof(Greenlet) - // << "\n\tUserGreenlet : " << sizeof(UserGreenlet) - // << "\n\tMainGreenlet : " << sizeof(MainGreenlet) - // << "\n\tExceptionState : " << sizeof(greenlet::ExceptionState) - // << "\n\tPythonState : " << sizeof(greenlet::PythonState) - // << "\n\tStackState : " << sizeof(greenlet::StackState) - // << "\n\tSwitchingArgs : " << sizeof(greenlet::SwitchingArgs) - // << "\n\tOwnedObject : " << sizeof(greenlet::refs::OwnedObject) - // << "\n\tBorrowedObject : " << sizeof(greenlet::refs::BorrowedObject) - // << "\n\tPyGreenlet : " << sizeof(PyGreenlet) - // << endl; - - return m.borrow(); // But really it's the main reference. - } - catch (const LockInitError& e) { - PyErr_SetString(PyExc_MemoryError, e.what()); - return NULL; - } - catch (const PyErrOccurred&) { - return NULL; - } - -} - -extern "C" { - -PyMODINIT_FUNC -PyInit__greenlet(void) -{ - return greenlet_internal_mod_init(); -} - -}; // extern C - -#ifdef __clang__ -# pragma clang diagnostic pop -#elif defined(__GNUC__) -# pragma GCC diagnostic pop -#endif diff --git a/myenv/Lib/site-packages/greenlet/greenlet.h b/myenv/Lib/site-packages/greenlet/greenlet.h deleted file mode 100644 index d02a16e..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet.h +++ /dev/null @@ -1,164 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ - -/* Greenlet object interface */ - -#ifndef Py_GREENLETOBJECT_H -#define Py_GREENLETOBJECT_H - - -#include - -#ifdef __cplusplus -extern "C" { -#endif - -/* This is deprecated and undocumented. It does not change. 
*/ -#define GREENLET_VERSION "1.0.0" - -#ifndef GREENLET_MODULE -#define implementation_ptr_t void* -#endif - -typedef struct _greenlet { - PyObject_HEAD - PyObject* weakreflist; - PyObject* dict; - implementation_ptr_t pimpl; -} PyGreenlet; - -#define PyGreenlet_Check(op) (op && PyObject_TypeCheck(op, &PyGreenlet_Type)) - - -/* C API functions */ - -/* Total number of symbols that are exported */ -#define PyGreenlet_API_pointers 12 - -#define PyGreenlet_Type_NUM 0 -#define PyExc_GreenletError_NUM 1 -#define PyExc_GreenletExit_NUM 2 - -#define PyGreenlet_New_NUM 3 -#define PyGreenlet_GetCurrent_NUM 4 -#define PyGreenlet_Throw_NUM 5 -#define PyGreenlet_Switch_NUM 6 -#define PyGreenlet_SetParent_NUM 7 - -#define PyGreenlet_MAIN_NUM 8 -#define PyGreenlet_STARTED_NUM 9 -#define PyGreenlet_ACTIVE_NUM 10 -#define PyGreenlet_GET_PARENT_NUM 11 - -#ifndef GREENLET_MODULE -/* This section is used by modules that uses the greenlet C API */ -static void** _PyGreenlet_API = NULL; - -# define PyGreenlet_Type \ - (*(PyTypeObject*)_PyGreenlet_API[PyGreenlet_Type_NUM]) - -# define PyExc_GreenletError \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletError_NUM]) - -# define PyExc_GreenletExit \ - ((PyObject*)_PyGreenlet_API[PyExc_GreenletExit_NUM]) - -/* - * PyGreenlet_New(PyObject *args) - * - * greenlet.greenlet(run, parent=None) - */ -# define PyGreenlet_New \ - (*(PyGreenlet * (*)(PyObject * run, PyGreenlet * parent)) \ - _PyGreenlet_API[PyGreenlet_New_NUM]) - -/* - * PyGreenlet_GetCurrent(void) - * - * greenlet.getcurrent() - */ -# define PyGreenlet_GetCurrent \ - (*(PyGreenlet * (*)(void)) _PyGreenlet_API[PyGreenlet_GetCurrent_NUM]) - -/* - * PyGreenlet_Throw( - * PyGreenlet *greenlet, - * PyObject *typ, - * PyObject *val, - * PyObject *tb) - * - * g.throw(...) - */ -# define PyGreenlet_Throw \ - (*(PyObject * (*)(PyGreenlet * self, \ - PyObject * typ, \ - PyObject * val, \ - PyObject * tb)) \ - _PyGreenlet_API[PyGreenlet_Throw_NUM]) - -/* - * PyGreenlet_Switch(PyGreenlet *greenlet, PyObject *args) - * - * g.switch(*args, **kwargs) - */ -# define PyGreenlet_Switch \ - (*(PyObject * \ - (*)(PyGreenlet * greenlet, PyObject * args, PyObject * kwargs)) \ - _PyGreenlet_API[PyGreenlet_Switch_NUM]) - -/* - * PyGreenlet_SetParent(PyObject *greenlet, PyObject *new_parent) - * - * g.parent = new_parent - */ -# define PyGreenlet_SetParent \ - (*(int (*)(PyGreenlet * greenlet, PyGreenlet * nparent)) \ - _PyGreenlet_API[PyGreenlet_SetParent_NUM]) - -/* - * PyGreenlet_GetParent(PyObject* greenlet) - * - * return greenlet.parent; - * - * This could return NULL even if there is no exception active. - * If it does not return NULL, you are responsible for decrementing the - * reference count. - */ -# define PyGreenlet_GetParent \ - (*(PyGreenlet* (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_GET_PARENT_NUM]) - -/* - * deprecated, undocumented alias. - */ -# define PyGreenlet_GET_PARENT PyGreenlet_GetParent - -# define PyGreenlet_MAIN \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_MAIN_NUM]) - -# define PyGreenlet_STARTED \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_STARTED_NUM]) - -# define PyGreenlet_ACTIVE \ - (*(int (*)(PyGreenlet*)) \ - _PyGreenlet_API[PyGreenlet_ACTIVE_NUM]) - - - - -/* Macro that imports greenlet and initializes C API */ -/* NOTE: This has actually moved to ``greenlet._greenlet._C_API``, but we - keep the older definition to be sure older code that might have a copy of - the header still works. 
*/ -# define PyGreenlet_Import() \ - { \ - _PyGreenlet_API = (void**)PyCapsule_Import("greenlet._C_API", 0); \ - } - -#endif /* GREENLET_MODULE */ - -#ifdef __cplusplus -} -#endif -#endif /* !Py_GREENLETOBJECT_H */ diff --git a/myenv/Lib/site-packages/greenlet/greenlet_allocator.hpp b/myenv/Lib/site-packages/greenlet/greenlet_allocator.hpp deleted file mode 100644 index b452f54..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_allocator.hpp +++ /dev/null @@ -1,63 +0,0 @@ -#ifndef GREENLET_ALLOCATOR_HPP -#define GREENLET_ALLOCATOR_HPP - -#define PY_SSIZE_T_CLEAN -#include -#include -#include "greenlet_compiler_compat.hpp" - - -namespace greenlet -{ - // This allocator is stateless; all instances are identical. - // It can *ONLY* be used when we're sure we're holding the GIL - // (Python's allocators require the GIL). - template - struct PythonAllocator : public std::allocator { - - PythonAllocator(const PythonAllocator& UNUSED(other)) - : std::allocator() - { - } - - PythonAllocator(const std::allocator other) - : std::allocator(other) - {} - - template - PythonAllocator(const std::allocator& other) - : std::allocator(other) - { - } - - PythonAllocator() : std::allocator() {} - - T* allocate(size_t number_objects, const void* UNUSED(hint)=0) - { - void* p; - if (number_objects == 1) - p = PyObject_Malloc(sizeof(T)); - else - p = PyMem_Malloc(sizeof(T) * number_objects); - return static_cast(p); - } - - void deallocate(T* t, size_t n) - { - void* p = t; - if (n == 1) { - PyObject_Free(p); - } - else - PyMem_Free(p); - } - // This member is deprecated in C++17 and removed in C++20 - template< class U > - struct rebind { - typedef PythonAllocator other; - }; - - }; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/greenlet_compiler_compat.hpp b/myenv/Lib/site-packages/greenlet/greenlet_compiler_compat.hpp deleted file mode 100644 index af24bd8..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_compiler_compat.hpp +++ /dev/null @@ -1,98 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef GREENLET_COMPILER_COMPAT_HPP -#define GREENLET_COMPILER_COMPAT_HPP - -/** - * Definitions to aid with compatibility with different compilers. - * - * .. caution:: Use extreme care with noexcept. - * Some compilers and runtimes, specifically gcc/libgcc/libstdc++ on - * Linux, implement stack unwinding by throwing an uncatchable - * exception, one that specifically does not appear to be an active - * exception to the rest of the runtime. If this happens while we're in a noexcept function, - * we have violated our dynamic exception contract, and so the runtime - * will call std::terminate(), which kills the process with the - * unhelpful message "terminate called without an active exception". - * - * This has happened in this scenario: A background thread is running - * a greenlet that has made a native call and released the GIL. - * Meanwhile, the main thread finishes and starts shutting down the - * interpreter. When the background thread is scheduled again and - * attempts to obtain the GIL, it notices that the interpreter is - * exiting and calls ``pthread_exit()``. This in turn starts to unwind - * the stack by throwing that exception. But we had the ``PyCall`` - * functions annotated as noexcept, so the runtime terminated us. - * - * #2 0x00007fab26fec2b7 in std::terminate() () from /lib/x86_64-linux-gnu/libstdc++.so.6 - * #3 0x00007fab26febb3c in __gxx_personality_v0 () from /lib/x86_64-linux-gnu/libstdc++.so.6 - * #4 0x00007fab26f34de6 in ?? 
() from /lib/x86_64-linux-gnu/libgcc_s.so.1 - * #6 0x00007fab276a34c6 in __GI___pthread_unwind at ./nptl/unwind.c:130 - * #7 0x00007fab2769bd3a in __do_cancel () at ../sysdeps/nptl/pthreadP.h:280 - * #8 __GI___pthread_exit (value=value@entry=0x0) at ./nptl/pthread_exit.c:36 - * #9 0x000000000052e567 in PyThread_exit_thread () at ../Python/thread_pthread.h:370 - * #10 0x00000000004d60b5 in take_gil at ../Python/ceval_gil.h:224 - * #11 0x00000000004d65f9 in PyEval_RestoreThread at ../Python/ceval.c:467 - * #12 0x000000000060cce3 in setipaddr at ../Modules/socketmodule.c:1203 - * #13 0x00000000006101cd in socket_gethostbyname - */ - -#include - -# define G_NO_COPIES_OF_CLS(Cls) private: \ - Cls(const Cls& other) = delete; \ - Cls& operator=(const Cls& other) = delete - -# define G_NO_ASSIGNMENT_OF_CLS(Cls) private: \ - Cls& operator=(const Cls& other) = delete - -# define G_NO_COPY_CONSTRUCTOR_OF_CLS(Cls) private: \ - Cls(const Cls& other) = delete; - - -// CAUTION: MSVC is stupidly picky: -// -// "The compiler ignores, without warning, any __declspec keywords -// placed after * or & and in front of the variable identifier in a -// declaration." -// (https://docs.microsoft.com/en-us/cpp/cpp/declspec?view=msvc-160) -// -// So pointer return types must be handled differently (because of the -// trailing *), or you get inscrutable compiler warnings like "error -// C2059: syntax error: ''" -// -// In C++ 11, there is a standard syntax for attributes, and -// GCC defines an attribute to use with this: [[gnu:noinline]]. -// In the future, this is expected to become standard. - -#if defined(__GNUC__) || defined(__clang__) -/* We used to check for GCC 4+ or 3.4+, but those compilers are - laughably out of date. Just assume they support it. */ -# define GREENLET_NOINLINE(name) __attribute__((noinline)) name -# define GREENLET_NOINLINE_P(rtype, name) rtype __attribute__((noinline)) name -# define UNUSED(x) UNUSED_ ## x __attribute__((__unused__)) -#elif defined(_MSC_VER) -/* We used to check for && (_MSC_VER >= 1300) but that's also out of date. */ -# define GREENLET_NOINLINE(name) __declspec(noinline) name -# define GREENLET_NOINLINE_P(rtype, name) __declspec(noinline) rtype name -# define UNUSED(x) UNUSED_ ## x -#endif - -#if defined(_MSC_VER) -# define G_NOEXCEPT_WIN32 noexcept -#else -# define G_NOEXCEPT_WIN32 -#endif - -#if defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__) -// 32-bit PPC/MacOSX. Only known to be tested on unreleased versions -// of macOS 10.6 using a macports build gcc 14. It appears that -// running C++ destructors of thread-local variables is broken. - -// See https://github.com/python-greenlet/greenlet/pull/419 -# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 1 -#else -# define GREENLET_BROKEN_THREAD_LOCAL_CLEANUP_JUST_LEAK 0 -#endif - - -#endif diff --git a/myenv/Lib/site-packages/greenlet/greenlet_cpython_add_pending.hpp b/myenv/Lib/site-packages/greenlet/greenlet_cpython_add_pending.hpp deleted file mode 100644 index 0d28efd..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_cpython_add_pending.hpp +++ /dev/null @@ -1,172 +0,0 @@ -#ifndef GREENLET_CPYTHON_ADD_PENDING_HPP -#define GREENLET_CPYTHON_ADD_PENDING_HPP - -#if (PY_VERSION_HEX >= 0x30800A0 && PY_VERSION_HEX < 0x3090000) && !(defined(_WIN32) || defined(WIN32)) -// XXX: From Python 3.8a3 [1] up until Python 3.9a6 [2][3], -// ``Py_AddPendingCall`` would try to produce a Python exception if -// the interpreter was in the beginning of shutting down when this -// function is called. 
However, ``Py_AddPendingCall`` doesn't require -// the GIL, and we are absolutely not holding it when we make that -// call. That means that trying to create the Python exception is -// using the C API in an undefined state; here the C API detects this -// and aborts the process with an error ("Fatal Python error: Python -// memory allocator called without holding the GIL": Add -> -// PyErr_SetString -> PyUnicode_New -> PyObject_Malloc). This arises -// (obviously) in multi-threaded programs and happens if one thread is -// exiting and cleaning up its thread-local data while the other -// thread is trying to shut down the interpreter. A crash on shutdown -// is still a crash and could result in data loss (e.g., daemon -// threads are still running, pending signal handlers may be present, -// buffers may not be flushed, there may be __del__ that need run, -// etc), so we have to work around it. -// -// Of course, we can (and do) check for whether the interpreter is -// shutting down before calling ``Py_AddPendingCall``, but that's a -// race condition since we don't hold the GIL, and so we may not -// actually get the right answer. Plus, ``Py_FinalizeEx`` actually -// calls ``_Py_FinishPendingCalls`` (which sets the pending->finishing -// flag, which is used to gate creating the exceptioen) *before* -// publishing any other data that would let us detect the shutdown -// (such as runtime->finalizing). So that point is moot. -// -// Our solution for those versions is to inline the same code, without -// the problematic bit that sets the exception. Unfortunately, all of -// the structure definitions are private/opaque, *and* we can't -// actually count on being able to include their definitions from -// ``internal/pycore_*``, because on some platforms those header files -// are incomplete (i.e., on macOS with macports 3.8, the includes are -// fine, but on Ubuntu jammy with 3.8 from ppa:deadsnakes or GitHub -// Actions 3.8 (I think it's Ubuntu 18.04), they con't be used; at -// least, I couldn't get them to work). So we need to define the -// structures and _PyRuntime data member ourself. Yet more -// unfortunately, _PyRuntime won't link on Windows, so we can only do -// this on other platforms. -// -// [1] https://github.com/python/cpython/commit/842a2f07f2f08a935ef470bfdaeef40f87490cfc -// [2] https://github.com/python/cpython/commit/cfc3c2f8b34d3864717ab584c5b6c260014ba55a -// [3] https://github.com/python/cpython/issues/81308 -# define GREENLET_BROKEN_PY_ADD_PENDING 1 - -// When defining these structures, the important thing is to get -// binary compatibility, i.e., structure layout. For that, we only -// need to define fields up to the ones we use; after that they're -// irrelevant UNLESS the structure is included in another structure -// *before* the structure we're interested in --- in that case, it -// must be complete. Ellipsis indicate elided trailing members. -// Pointer types are changed to void* to keep from having to define -// more structures. - -// From "internal/pycore_atomic.h" - -// There are several different definitions of this, including the -// plain ``int`` version, a ``volatile int`` and an ``_Atomic int`` -// I don't think any of those change the size/layout. -typedef struct _Py_atomic_int { - volatile int _value; -} _Py_atomic_int; - -// This needs too much infrastructure, so we just do a regular store. 
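// [Editor's note: illustrative aside, not part of the vendored header.]
// The macro defined just below replaces CPython's atomic store with a
// plain write through a volatile field; that matches _Py_atomic_int's
// layout, which is all this shim needs, but it is not a true atomic
// operation. Expressed with standard C++11 atomics, the intended
// operation is a relaxed store (names here are illustrative):
#include <atomic>

static std::atomic<int> sketch_calls_to_do{0};

static void sketch_signal_pending()
{
    // Same intent as _Py_atomic_store_relaxed(&pending->calls_to_do, 1):
    // atomic, but with no ordering guarantees.
    sketch_calls_to_do.store(1, std::memory_order_relaxed);
}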
-#define _Py_atomic_store_relaxed(ATOMIC_VAL, NEW_VAL) \ - (ATOMIC_VAL)->_value = NEW_VAL - - - -// From "internal/pycore_pymem.h" -#define NUM_GENERATIONS 3 - - -struct gc_generation { - PyGC_Head head; // We already have this defined. - int threshold; - int count; -}; -struct gc_generation_stats { - Py_ssize_t collections; - Py_ssize_t collected; - Py_ssize_t uncollectable; -}; - -struct _gc_runtime_state { - void *trash_delete_later; - int trash_delete_nesting; - int enabled; - int debug; - struct gc_generation generations[NUM_GENERATIONS]; - void *generation0; - struct gc_generation permanent_generation; - struct gc_generation_stats generation_stats[NUM_GENERATIONS]; - int collecting; - void *garbage; - void *callbacks; - Py_ssize_t long_lived_total; - Py_ssize_t long_lived_pending; -}; - -// From "internal/pycore_pystate.h" -struct _pending_calls { - int finishing; - PyThread_type_lock lock; - _Py_atomic_int calls_to_do; - int async_exc; -#define NPENDINGCALLS 32 - struct { - int (*func)(void *); - void *arg; - } calls[NPENDINGCALLS]; - int first; - int last; -}; - -struct _ceval_runtime_state { - int recursion_limit; - int tracing_possible; - _Py_atomic_int eval_breaker; - _Py_atomic_int gil_drop_request; - struct _pending_calls pending; - // ... -}; - -typedef struct pyruntimestate { - int preinitializing; - int preinitialized; - int core_initialized; - int initialized; - void *finalizing; - - struct pyinterpreters { - PyThread_type_lock mutex; - void *head; - void *main; - int64_t next_id; - } interpreters; - // XXX Remove this field once we have a tp_* slot. - struct _xidregistry { - PyThread_type_lock mutex; - void *head; - } xidregistry; - - unsigned long main_thread; - -#define NEXITFUNCS 32 - void (*exitfuncs[NEXITFUNCS])(void); - int nexitfuncs; - - struct _gc_runtime_state gc; - struct _ceval_runtime_state ceval; - // ... -} _PyRuntimeState; - -#define SIGNAL_PENDING_CALLS(ceval) \ - do { \ - _Py_atomic_store_relaxed(&(ceval)->pending.calls_to_do, 1); \ - _Py_atomic_store_relaxed(&(ceval)->eval_breaker, 1); \ - } while (0) - -extern _PyRuntimeState _PyRuntime; - -#else -# define GREENLET_BROKEN_PY_ADD_PENDING 0 -#endif - - -#endif diff --git a/myenv/Lib/site-packages/greenlet/greenlet_cpython_compat.hpp b/myenv/Lib/site-packages/greenlet/greenlet_cpython_compat.hpp deleted file mode 100644 index ce5fd88..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_cpython_compat.hpp +++ /dev/null @@ -1,142 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef GREENLET_CPYTHON_COMPAT_H -#define GREENLET_CPYTHON_COMPAT_H - -/** - * Helpers for compatibility with multiple versions of CPython. - */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" - - -#if PY_VERSION_HEX >= 0x30A00B1 -# define GREENLET_PY310 1 -#else -# define GREENLET_PY310 0 -#endif - -/* -Python 3.10 beta 1 changed tstate->use_tracing to a nested cframe member. -See https://github.com/python/cpython/pull/25276 -We have to save and restore this as well. - -Python 3.13 removed PyThreadState.cframe (GH-108035). -*/ -#if GREENLET_PY310 && PY_VERSION_HEX < 0x30D0000 -# define GREENLET_USE_CFRAME 1 -#else -# define GREENLET_USE_CFRAME 0 -#endif - - -#if PY_VERSION_HEX >= 0x30B00A4 -/* -Greenlet won't compile on anything older than Python 3.11 alpha 4 (see -https://bugs.python.org/issue46090). Summary of breaking internal changes: -- Python 3.11 alpha 1 changed how frame objects are represented internally. 
- - https://github.com/python/cpython/pull/30122 -- Python 3.11 alpha 3 changed how recursion limits are stored. - - https://github.com/python/cpython/pull/29524 -- Python 3.11 alpha 4 changed how exception state is stored. It also includes a - change to help greenlet save and restore the interpreter frame "data stack". - - https://github.com/python/cpython/pull/30122 - - https://github.com/python/cpython/pull/30234 -*/ -# define GREENLET_PY311 1 -#else -# define GREENLET_PY311 0 -#endif - - -#if PY_VERSION_HEX >= 0x30C0000 -# define GREENLET_PY312 1 -#else -# define GREENLET_PY312 0 -#endif - -#if PY_VERSION_HEX >= 0x30D0000 -# define GREENLET_PY313 1 -#else -# define GREENLET_PY313 0 -#endif - -#ifndef Py_SET_REFCNT -/* Py_REFCNT and Py_SIZE macros are converted to functions -https://bugs.python.org/issue39573 */ -# define Py_SET_REFCNT(obj, refcnt) Py_REFCNT(obj) = (refcnt) -#endif - -#ifndef _Py_DEC_REFTOTAL -/* _Py_DEC_REFTOTAL macro has been removed from Python 3.9 by: - https://github.com/python/cpython/commit/49932fec62c616ec88da52642339d83ae719e924 - - The symbol we use to replace it was removed by at least 3.12. -*/ -# ifdef Py_REF_DEBUG -# if GREENLET_PY312 -# define _Py_DEC_REFTOTAL -# else -# define _Py_DEC_REFTOTAL _Py_RefTotal-- -# endif -# else -# define _Py_DEC_REFTOTAL -# endif -#endif -// Define these flags like Cython does if we're on an old version. -#ifndef Py_TPFLAGS_CHECKTYPES - #define Py_TPFLAGS_CHECKTYPES 0 -#endif -#ifndef Py_TPFLAGS_HAVE_INDEX - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif -#ifndef Py_TPFLAGS_HAVE_NEWBUFFER - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif - -#ifndef Py_TPFLAGS_HAVE_VERSION_TAG - #define Py_TPFLAGS_HAVE_VERSION_TAG 0 -#endif - -#define G_TPFLAGS_DEFAULT Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_VERSION_TAG | Py_TPFLAGS_CHECKTYPES | Py_TPFLAGS_HAVE_NEWBUFFER | Py_TPFLAGS_HAVE_GC - - -#if PY_VERSION_HEX < 0x03090000 -// The official version only became available in 3.9 -# define PyObject_GC_IsTracked(o) _PyObject_GC_IS_TRACKED(o) -#endif - - -// bpo-43760 added PyThreadState_EnterTracing() to Python 3.11.0a2 -#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) -static inline void PyThreadState_EnterTracing(PyThreadState *tstate) -{ - tstate->tracing++; -#if PY_VERSION_HEX >= 0x030A00A1 - tstate->cframe->use_tracing = 0; -#else - tstate->use_tracing = 0; -#endif -} -#endif - -// bpo-43760 added PyThreadState_LeaveTracing() to Python 3.11.0a2 -#if PY_VERSION_HEX < 0x030B00A2 && !defined(PYPY_VERSION) -static inline void PyThreadState_LeaveTracing(PyThreadState *tstate) -{ - tstate->tracing--; - int use_tracing = (tstate->c_tracefunc != NULL - || tstate->c_profilefunc != NULL); -#if PY_VERSION_HEX >= 0x030A00A1 - tstate->cframe->use_tracing = use_tracing; -#else - tstate->use_tracing = use_tracing; -#endif -} -#endif - -#if !defined(Py_C_RECURSION_LIMIT) && defined(C_RECURSION_LIMIT) -# define Py_C_RECURSION_LIMIT C_RECURSION_LIMIT -#endif - -#endif /* GREENLET_CPYTHON_COMPAT_H */ diff --git a/myenv/Lib/site-packages/greenlet/greenlet_exceptions.hpp b/myenv/Lib/site-packages/greenlet/greenlet_exceptions.hpp deleted file mode 100644 index 617f07c..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_exceptions.hpp +++ /dev/null @@ -1,171 +0,0 @@ -#ifndef GREENLET_EXCEPTIONS_HPP -#define GREENLET_EXCEPTIONS_HPP - -#define PY_SSIZE_T_CLEAN -#include -#include -#include - -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -#endif - -namespace greenlet { - - class PyErrOccurred : public 
std::runtime_error - { - public: - - // CAUTION: In debug builds, may run arbitrary Python code. - static const PyErrOccurred - from_current() - { - assert(PyErr_Occurred()); -#ifndef NDEBUG - // This is not exception safe, and - // not necessarily safe in general (what if it switches?) - // But we only do this in debug mode, where we are in - // tight control of what exceptions are getting raised and - // can prevent those issues. - - // You can't call PyObject_Str with a pending exception. - PyObject* typ; - PyObject* val; - PyObject* tb; - - PyErr_Fetch(&typ, &val, &tb); - PyObject* typs = PyObject_Str(typ); - PyObject* vals = PyObject_Str(val ? val : typ); - const char* typ_msg = PyUnicode_AsUTF8(typs); - const char* val_msg = PyUnicode_AsUTF8(vals); - PyErr_Restore(typ, val, tb); - - std::string msg(typ_msg); - msg += ": "; - msg += val_msg; - PyErrOccurred ex(msg); - Py_XDECREF(typs); - Py_XDECREF(vals); - - return ex; -#else - return PyErrOccurred(); -#endif - } - - PyErrOccurred() : std::runtime_error("") - { - assert(PyErr_Occurred()); - } - - PyErrOccurred(const std::string& msg) : std::runtime_error(msg) - { - assert(PyErr_Occurred()); - } - - PyErrOccurred(PyObject* exc_kind, const char* const msg) - : std::runtime_error(msg) - { - PyErr_SetString(exc_kind, msg); - } - - PyErrOccurred(PyObject* exc_kind, const std::string msg) - : std::runtime_error(msg) - { - // This copies the c_str, so we don't have any lifetime - // issues to worry about. - PyErr_SetString(exc_kind, msg.c_str()); - } - - PyErrOccurred(PyObject* exc_kind, - const std::string msg, //This is the format - //string; that's not - //usually safe! - - PyObject* borrowed_obj_one, PyObject* borrowed_obj_two) - : std::runtime_error(msg) - { - - //This is designed specifically for the - //``check_switch_allowed`` function. - - // PyObject_Str and PyObject_Repr are safe to call with - // NULL pointers; they return the string "" in that - // case. - // This function always returns null. - PyErr_Format(exc_kind, - msg.c_str(), - borrowed_obj_one, borrowed_obj_two); - } - }; - - class TypeError : public PyErrOccurred - { - public: - TypeError(const char* const what) - : PyErrOccurred(PyExc_TypeError, what) - { - } - TypeError(const std::string what) - : PyErrOccurred(PyExc_TypeError, what) - { - } - }; - - class ValueError : public PyErrOccurred - { - public: - ValueError(const char* const what) - : PyErrOccurred(PyExc_ValueError, what) - { - } - }; - - class AttributeError : public PyErrOccurred - { - public: - AttributeError(const char* const what) - : PyErrOccurred(PyExc_AttributeError, what) - { - } - }; - - /** - * Calls `Py_FatalError` when constructed, so you can't actually - * throw this. It just makes static analysis easier. 
- */ - class PyFatalError : public std::runtime_error - { - public: - PyFatalError(const char* const msg) - : std::runtime_error(msg) - { - Py_FatalError(msg); - } - }; - - static inline PyObject* - Require(PyObject* p, const std::string& msg="") - { - if (!p) { - throw PyErrOccurred(msg); - } - return p; - }; - - static inline void - Require(const int retval) - { - if (retval < 0) { - throw PyErrOccurred(); - } - }; - - -}; -#ifdef __clang__ -# pragma clang diagnostic pop -#endif - -#endif diff --git a/myenv/Lib/site-packages/greenlet/greenlet_internal.hpp b/myenv/Lib/site-packages/greenlet/greenlet_internal.hpp deleted file mode 100644 index f2b15d5..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_internal.hpp +++ /dev/null @@ -1,107 +0,0 @@ -/* -*- indent-tabs-mode: nil; tab-width: 4; -*- */ -#ifndef GREENLET_INTERNAL_H -#define GREENLET_INTERNAL_H -#ifdef __clang__ -# pragma clang diagnostic push -# pragma clang diagnostic ignored "-Wunused-function" -#endif - -/** - * Implementation helpers. - * - * C++ templates and inline functions should go here. - */ -#define PY_SSIZE_T_CLEAN -#include "greenlet_compiler_compat.hpp" -#include "greenlet_cpython_compat.hpp" -#include "greenlet_exceptions.hpp" -#include "TGreenlet.hpp" -#include "greenlet_allocator.hpp" - -#include -#include - -#define GREENLET_MODULE -struct _greenlet; -typedef struct _greenlet PyGreenlet; -namespace greenlet { - - class ThreadState; - // We can't use the PythonAllocator for this, because we push to it - // from the thread state destructor, which doesn't have the GIL, - // and Python's allocators can only be called with the GIL. - typedef std::vector cleanup_queue_t; - -}; - - -#define implementation_ptr_t greenlet::Greenlet* - - -#include "greenlet.h" - -void -greenlet::refs::MainGreenletExactChecker(void *p) -{ - if (!p) { - return; - } - // We control the class of the main greenlet exactly. - if (Py_TYPE(p) != &PyGreenlet_Type) { - std::string err("MainGreenlet: Expected exactly a greenlet, not a "); - err += Py_TYPE(p)->tp_name; - throw greenlet::TypeError(err); - } - - // Greenlets from dead threads no longer respond to main() with a - // true value; so in that case we need to perform an additional - // check. - Greenlet* g = static_cast(p)->pimpl; - if (g->main()) { - return; - } - if (!dynamic_cast(g)) { - std::string err("MainGreenlet: Expected exactly a main greenlet, not a "); - err += Py_TYPE(p)->tp_name; - throw greenlet::TypeError(err); - } -} - - - -template -inline greenlet::Greenlet* greenlet::refs::_OwnedGreenlet::operator->() const noexcept -{ - return reinterpret_cast(this->p)->pimpl; -} - -template -inline greenlet::Greenlet* greenlet::refs::_BorrowedGreenlet::operator->() const noexcept -{ - return reinterpret_cast(this->p)->pimpl; -} - -#include -#include - - -extern PyTypeObject PyGreenlet_Type; - - - -/** - * Forward declarations needed in multiple files. 
- */ -static PyObject* green_switch(PyGreenlet* self, PyObject* args, PyObject* kwargs); - - -#ifdef __clang__ -# pragma clang diagnostic pop -#endif - - -#endif - -// Local Variables: -// flycheck-clang-include-path: ("../../include" "/opt/local/Library/Frameworks/Python.framework/Versions/3.10/include/python3.10") -// End: diff --git a/myenv/Lib/site-packages/greenlet/greenlet_refs.hpp b/myenv/Lib/site-packages/greenlet/greenlet_refs.hpp deleted file mode 100644 index b7e5e3f..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_refs.hpp +++ /dev/null @@ -1,1118 +0,0 @@ -#ifndef GREENLET_REFS_HPP -#define GREENLET_REFS_HPP - -#define PY_SSIZE_T_CLEAN -#include - -#include - -//#include "greenlet_internal.hpp" -#include "greenlet_compiler_compat.hpp" -#include "greenlet_cpython_compat.hpp" -#include "greenlet_exceptions.hpp" - -struct _greenlet; -struct _PyMainGreenlet; - -typedef struct _greenlet PyGreenlet; -extern PyTypeObject PyGreenlet_Type; - - -#ifdef GREENLET_USE_STDIO -#include -using std::cerr; -using std::endl; -#endif - -namespace greenlet -{ - class Greenlet; - - namespace refs - { - // Type checkers throw a TypeError if the argument is not - // null, and isn't of the required Python type. - // (We can't use most of the defined type checkers - // like PyList_Check, etc, directly, because they are - // implemented as macros.) - typedef void (*TypeChecker)(void*); - - void - NoOpChecker(void*) - { - return; - } - - void - GreenletChecker(void *p) - { - if (!p) { - return; - } - - PyTypeObject* typ = Py_TYPE(p); - // fast, common path. (PyObject_TypeCheck is a macro or - // static inline function, and it also does a - // direct comparison of the type pointers, but its fast - // path only handles one type) - if (typ == &PyGreenlet_Type) { - return; - } - - if (!PyObject_TypeCheck(p, &PyGreenlet_Type)) { - std::string err("GreenletChecker: Expected any type of greenlet, not "); - err += Py_TYPE(p)->tp_name; - throw TypeError(err); - } - } - - void - MainGreenletExactChecker(void *p); - - template - class PyObjectPointer; - - template - class OwnedReference; - - - template - class BorrowedReference; - - typedef BorrowedReference BorrowedObject; - typedef OwnedReference OwnedObject; - - class ImmortalObject; - class ImmortalString; - - template - class _OwnedGreenlet; - - typedef _OwnedGreenlet OwnedGreenlet; - typedef _OwnedGreenlet OwnedMainGreenlet; - - template - class _BorrowedGreenlet; - - typedef _BorrowedGreenlet BorrowedGreenlet; - - void - ContextExactChecker(void *p) - { - if (!p) { - return; - } - if (!PyContext_CheckExact(p)) { - throw TypeError( - "greenlet context must be a contextvars.Context or None" - ); - } - } - - typedef OwnedReference OwnedContext; - } -} - -namespace greenlet { - - - namespace refs { - // A set of classes to make reference counting rules in python - // code explicit. - // - // Rules of use: - // (1) Functions returning a new reference that the caller of the - // function is expected to dispose of should return a - // ``OwnedObject`` object. This object automatically releases its - // reference when it goes out of scope. It works like a ``std::shared_ptr`` - // and can be copied or used as a function parameter (but don't do - // that). Note that constructing a ``OwnedObject`` from a - // PyObject* steals the reference. - // (2) Parameters to functions should be either a - // ``OwnedObject&``, or, more generally, a ``PyObjectPointer&``. 
- // If the function needs to create its own new reference, it can - // do so by copying to a local ``OwnedObject``. - // (3) Functions returning an existing pointer that is NOT - // incref'd, and which the caller MUST NOT decref, - // should return a ``BorrowedObject``. - - // XXX: The following two paragraphs do not hold for all platforms. - // Notably, 32-bit PPC Linux passes structs by reference, not by - // value, so this actually doesn't work. (Although that's the only - // platform that doesn't work on.) DO NOT ATTEMPT IT. The - // unfortunate consequence of that is that the slots which we - // *know* are already type safe will wind up calling the type - // checker function (when we had the slots accepting - // BorrowedGreenlet, this was bypassed), so this slows us down. - // TODO: Optimize this again. - - // For a class with a single pointer member, whose constructor - // does nothing but copy a pointer parameter into the member, and - // which can then be converted back to the pointer type, compilers - // generate code that's the same as just passing the pointer. - // That is, func(BorrowedObject x) called like ``PyObject* p = - // ...; f(p)`` has 0 overhead. Similarly, they "unpack" to the - // pointer type with 0 overhead. - // - // If there are no virtual functions, no complex inheritance (maybe?) and - // no destructor, these can be directly used as parameters in - // Python callbacks like tp_init: the layout is the same as a - // single pointer. Only subclasses with trivial constructors that - // do nothing but set the single pointer member are safe to use - // that way. - - - // This is the base class for things that can be done with a - // PyObject pointer. It assumes nothing about memory management. - // NOTE: Nothing is virtual, so subclasses shouldn't add new - // storage fields or try to override these methods. - template - class PyObjectPointer - { - public: - typedef T PyType; - protected: - T* p; - public: - PyObjectPointer(T* it=nullptr) : p(it) - { - TC(p); - } - - // We don't allow automatic casting to PyObject* at this - // level, because then we could be passed to Py_DECREF/INCREF, - // but we want nothing to do with memory management. If you - // know better, then you can use the get() method, like on a - // std::shared_ptr. Except we name it borrow() to clarify that - // if this is a reference-tracked object, the pointer you get - // back will go away when the object does. - // TODO: This should probably not exist here, but be moved - // down to relevant sub-types. - - T* borrow() const noexcept - { - return this->p; - } - - PyObject* borrow_o() const noexcept - { - return reinterpret_cast(this->p); - } - - T* operator->() const noexcept - { - return this->p; - } - - bool is_None() const noexcept - { - return this->p == Py_None; - } - - PyObject* acquire_or_None() const noexcept - { - PyObject* result = this->p ? reinterpret_cast(this->p) : Py_None; - Py_INCREF(result); - return result; - } - - explicit operator bool() const noexcept - { - return this->p != nullptr; - } - - bool operator!() const noexcept - { - return this->p == nullptr; - } - - Py_ssize_t REFCNT() const noexcept - { - return p ? Py_REFCNT(p) : -42; - } - - PyTypeObject* TYPE() const noexcept - { - return p ? 
Py_TYPE(p) : nullptr; - } - - inline OwnedObject PyStr() const noexcept; - inline const std::string as_str() const noexcept; - inline OwnedObject PyGetAttr(const ImmortalObject& name) const noexcept; - inline OwnedObject PyRequireAttr(const char* const name) const; - inline OwnedObject PyRequireAttr(const ImmortalString& name) const; - inline OwnedObject PyCall(const BorrowedObject& arg) const; - inline OwnedObject PyCall(PyGreenlet* arg) const ; - inline OwnedObject PyCall(PyObject* arg) const ; - // PyObject_Call(this, args, kwargs); - inline OwnedObject PyCall(const BorrowedObject args, - const BorrowedObject kwargs) const; - inline OwnedObject PyCall(const OwnedObject& args, - const OwnedObject& kwargs) const; - - protected: - void _set_raw_pointer(void* t) - { - TC(t); - p = reinterpret_cast(t); - } - void* _get_raw_pointer() const - { - return p; - } - }; - -#ifdef GREENLET_USE_STDIO - template - std::ostream& operator<<(std::ostream& os, const PyObjectPointer& s) - { - const std::type_info& t = typeid(s); - os << t.name() - << "(addr=" << s.borrow() - << ", refcnt=" << s.REFCNT() - << ", value=" << s.as_str() - << ")"; - - return os; - } -#endif - - template - inline bool operator==(const PyObjectPointer& lhs, const PyObject* const rhs) noexcept - { - return static_cast(lhs.borrow_o()) == static_cast(rhs); - } - - template - inline bool operator==(const PyObjectPointer& lhs, const PyObjectPointer& rhs) noexcept - { - return lhs.borrow_o() == rhs.borrow_o(); - } - - template - inline bool operator!=(const PyObjectPointer& lhs, - const PyObjectPointer& rhs) noexcept - { - return lhs.borrow_o() != rhs.borrow_o(); - } - - template - class OwnedReference : public PyObjectPointer - { - private: - friend class OwnedList; - - protected: - explicit OwnedReference(T* it) : PyObjectPointer(it) - { - } - - public: - - // Constructors - - static OwnedReference consuming(PyObject* p) - { - return OwnedReference(reinterpret_cast(p)); - } - - static OwnedReference owning(T* p) - { - OwnedReference result(p); - Py_XINCREF(result.p); - return result; - } - - OwnedReference() : PyObjectPointer(nullptr) - {} - - explicit OwnedReference(const PyObjectPointer<>& other) - : PyObjectPointer(nullptr) - { - T* op = other.borrow(); - TC(op); - this->p = other.borrow(); - Py_XINCREF(this->p); - } - - // It would be good to make use of the C++11 distinction - // between move and copy operations, e.g., constructing from a - // pointer should be a move operation. - // In the common case of ``OwnedObject x = Py_SomeFunction()``, - // the call to the copy constructor will be elided completely. - OwnedReference(const OwnedReference& other) - : PyObjectPointer(other.p) - { - Py_XINCREF(this->p); - } - - static OwnedReference None() - { - Py_INCREF(Py_None); - return OwnedReference(Py_None); - } - - // We can assign from exactly our type without any extra checking - OwnedReference& operator=(const OwnedReference& other) - { - Py_XINCREF(other.p); - const T* tmp = this->p; - this->p = other.p; - Py_XDECREF(tmp); - return *this; - } - - OwnedReference& operator=(const BorrowedReference other) - { - return this->operator=(other.borrow()); - } - - OwnedReference& operator=(T* const other) - { - TC(other); - Py_XINCREF(other); - T* tmp = this->p; - this->p = other; - Py_XDECREF(tmp); - return *this; - } - - // We can assign from an arbitrary reference type - // if it passes our check. 
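The ``consuming``/``owning`` factory pair above encodes, in the type system, whether a raw pointer arrives with its reference already counted. A reduced sketch of the same RAII shape, with hypothetical names and without the type-checker template of the real class:

#define PY_SSIZE_T_CLEAN
#include <Python.h>

class Owned
{
    PyObject* p;
    explicit Owned(PyObject* it) : p(it) {} // reached only via the factories
public:
    // The pointer already carries a reference that becomes ours (API results).
    static Owned consuming(PyObject* it) { return Owned(it); }
    // The pointer is borrowed: take our own reference to keep it alive.
    static Owned owning(PyObject* it) { Py_XINCREF(it); return Owned(it); }

    Owned(const Owned& other) : p(other.p) { Py_XINCREF(p); }
    Owned& operator=(const Owned&) = delete; // kept minimal in this sketch
    ~Owned() { Py_CLEAR(p); }                // released on scope exit

    PyObject* borrow() const { return p; }
};

With this shape, ``Owned t = Owned::consuming(PyLong_FromLong(7));`` needs no matching ``Py_DECREF``; going out of scope is the decref.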
- template - OwnedReference& operator=(const OwnedReference& other) - { - X* op = other.borrow(); - TC(op); - return this->operator=(reinterpret_cast(op)); - } - - inline void steal(T* other) - { - assert(this->p == nullptr); - TC(other); - this->p = other; - } - - T* relinquish_ownership() - { - T* result = this->p; - this->p = nullptr; - return result; - } - - T* acquire() const - { - // Return a new reference. - // TODO: This may go away when we have reference objects - // throughout the code. - Py_XINCREF(this->p); - return this->p; - } - - // Nothing else declares a destructor, we're the leaf, so we - // should be able to get away without virtual. - ~OwnedReference() - { - Py_CLEAR(this->p); - } - - void CLEAR() - { - Py_CLEAR(this->p); - assert(this->p == nullptr); - } - }; - - static inline - void operator<<=(PyObject*& target, OwnedObject& o) - { - target = o.relinquish_ownership(); - } - - - class NewReference : public OwnedObject - { - private: - G_NO_COPIES_OF_CLS(NewReference); - public: - // Consumes the reference. Only use this - // for API return values. - NewReference(PyObject* it) : OwnedObject(it) - { - } - }; - - class NewDictReference : public NewReference - { - private: - G_NO_COPIES_OF_CLS(NewDictReference); - public: - NewDictReference() : NewReference(PyDict_New()) - { - if (!this->p) { - throw PyErrOccurred(); - } - } - - void SetItem(const char* const key, PyObject* value) - { - Require(PyDict_SetItemString(this->p, key, value)); - } - - void SetItem(const PyObjectPointer<>& key, PyObject* value) - { - Require(PyDict_SetItem(this->p, key.borrow_o(), value)); - } - }; - - template - class _OwnedGreenlet: public OwnedReference - { - private: - protected: - _OwnedGreenlet(T* it) : OwnedReference(it) - {} - - public: - _OwnedGreenlet() : OwnedReference() - {} - - _OwnedGreenlet(const _OwnedGreenlet& other) : OwnedReference(other) - { - } - _OwnedGreenlet(OwnedMainGreenlet& other) : - OwnedReference(reinterpret_cast(other.acquire())) - { - } - _OwnedGreenlet(const BorrowedGreenlet& other); - // Steals a reference. - static _OwnedGreenlet consuming(PyGreenlet* it) - { - return _OwnedGreenlet(reinterpret_cast(it)); - } - - inline _OwnedGreenlet& operator=(const OwnedGreenlet& other) - { - return this->operator=(other.borrow()); - } - - inline _OwnedGreenlet& operator=(const BorrowedGreenlet& other); - - _OwnedGreenlet& operator=(const OwnedMainGreenlet& other) - { - PyGreenlet* owned = other.acquire(); - Py_XDECREF(this->p); - this->p = reinterpret_cast(owned); - return *this; - } - - _OwnedGreenlet& operator=(T* const other) - { - OwnedReference::operator=(other); - return *this; - } - - T* relinquish_ownership() - { - T* result = this->p; - this->p = nullptr; - return result; - } - - PyObject* relinquish_ownership_o() - { - return reinterpret_cast(relinquish_ownership()); - } - - inline Greenlet* operator->() const noexcept; - inline operator Greenlet*() const noexcept; - }; - - template - class BorrowedReference : public PyObjectPointer - { - public: - // Allow implicit creation from PyObject* pointers as we - // transition to using these classes. Also allow automatic - // conversion to PyObject* for passing to C API calls and even - // for Py_INCREF/DECREF, because we ourselves do no memory management. 
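The preceding comment is the heart of the borrowed-reference wrapper: a trivially copyable single-pointer class passes like a raw ``PyObject*`` while documenting that no reference counting happens inside. A compressed sketch, illustrative names only:

#define PY_SSIZE_T_CLEAN
#include <Python.h>

class Borrowed
{
    PyObject* p;
public:
    Borrowed(PyObject* it) : p(it) {}        // implicit on purpose: zero-cost wrap
    operator PyObject*() const { return p; } // unwraps for C API calls
};

// Costs the same as taking PyObject* directly, but the signature now
// documents that this function does no reference counting of its own.
static Py_ssize_t checked_len(Borrowed obj)
{
    return PyObject_Length(obj);
}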
- BorrowedReference(T* it) : PyObjectPointer(it) - {} - - BorrowedReference(const PyObjectPointer& ref) : PyObjectPointer(ref.borrow()) - {} - - BorrowedReference() : PyObjectPointer(nullptr) - {} - - operator T*() const - { - return this->p; - } - }; - - typedef BorrowedReference BorrowedObject; - //typedef BorrowedReference BorrowedGreenlet; - - template - class _BorrowedGreenlet : public BorrowedReference - { - public: - _BorrowedGreenlet() : - BorrowedReference(nullptr) - {} - - _BorrowedGreenlet(T* it) : - BorrowedReference(it) - {} - - _BorrowedGreenlet(const BorrowedObject& it); - - _BorrowedGreenlet(const OwnedGreenlet& it) : - BorrowedReference(it.borrow()) - {} - - _BorrowedGreenlet& operator=(const BorrowedObject& other); - - // We get one of these for PyGreenlet, but one for PyObject - // is handy as well - operator PyObject*() const - { - return reinterpret_cast(this->p); - } - Greenlet* operator->() const noexcept; - operator Greenlet*() const noexcept; - }; - - typedef _BorrowedGreenlet BorrowedGreenlet; - - template - _OwnedGreenlet::_OwnedGreenlet(const BorrowedGreenlet& other) - : OwnedReference(reinterpret_cast(other.borrow())) - { - Py_XINCREF(this->p); - } - - - class BorrowedMainGreenlet - : public _BorrowedGreenlet - { - public: - BorrowedMainGreenlet(const OwnedMainGreenlet& it) : - _BorrowedGreenlet(it.borrow()) - {} - BorrowedMainGreenlet(PyGreenlet* it=nullptr) - : _BorrowedGreenlet(it) - {} - }; - - template - _OwnedGreenlet& _OwnedGreenlet::operator=(const BorrowedGreenlet& other) - { - return this->operator=(other.borrow()); - } - - - class ImmortalObject : public PyObjectPointer<> - { - private: - G_NO_ASSIGNMENT_OF_CLS(ImmortalObject); - public: - explicit ImmortalObject(PyObject* it) : PyObjectPointer<>(it) - { - } - - ImmortalObject(const ImmortalObject& other) - : PyObjectPointer<>(other.p) - { - - } - - /** - * Become the new owner of the object. Does not change the - * reference count. - */ - ImmortalObject& operator=(PyObject* it) - { - assert(this->p == nullptr); - this->p = it; - return *this; - } - - static ImmortalObject consuming(PyObject* it) - { - return ImmortalObject(it); - } - - inline operator PyObject*() const - { - return this->p; - } - }; - - class ImmortalString : public ImmortalObject - { - private: - G_NO_COPIES_OF_CLS(ImmortalString); - const char* str; - public: - ImmortalString(const char* const str) : - ImmortalObject(str ? Require(PyUnicode_InternFromString(str)) : nullptr) - { - this->str = str; - } - - inline ImmortalString& operator=(const char* const str) - { - if (!this->p) { - this->p = Require(PyUnicode_InternFromString(str)); - this->str = str; - } - else { - assert(this->str == str); - } - return *this; - } - - inline operator std::string() const - { - return this->str; - } - - }; - - class ImmortalEventName : public ImmortalString - { - private: - G_NO_COPIES_OF_CLS(ImmortalEventName); - public: - ImmortalEventName(const char* const str) : ImmortalString(str) - {} - }; - - class ImmortalException : public ImmortalObject - { - private: - G_NO_COPIES_OF_CLS(ImmortalException); - public: - ImmortalException(const char* const name, PyObject* base=nullptr) : - ImmortalObject(name - // Python 2.7 isn't const correct - ? 
Require(PyErr_NewException((char*)name, base, nullptr)) - : nullptr) - {} - - inline bool PyExceptionMatches() const - { - return PyErr_ExceptionMatches(this->p) > 0; - } - - }; - - template - inline OwnedObject PyObjectPointer::PyStr() const noexcept - { - if (!this->p) { - return OwnedObject(); - } - return OwnedObject::consuming(PyObject_Str(reinterpret_cast(this->p))); - } - - template - inline const std::string PyObjectPointer::as_str() const noexcept - { - // NOTE: This is not Python exception safe. - if (this->p) { - // The Python APIs return a cached char* value that's only valid - // as long as the original object stays around, and we're - // about to (probably) toss it. Hence the copy to std::string. - OwnedObject py_str = this->PyStr(); - if (!py_str) { - return "(nil)"; - } - return PyUnicode_AsUTF8(py_str.borrow()); - } - return "(nil)"; - } - - template - inline OwnedObject PyObjectPointer::PyGetAttr(const ImmortalObject& name) const noexcept - { - assert(this->p); - return OwnedObject::consuming(PyObject_GetAttr(reinterpret_cast(this->p), name)); - } - - template - inline OwnedObject PyObjectPointer::PyRequireAttr(const char* const name) const - { - assert(this->p); - return OwnedObject::consuming(Require(PyObject_GetAttrString(this->p, name), name)); - } - - template - inline OwnedObject PyObjectPointer::PyRequireAttr(const ImmortalString& name) const - { - assert(this->p); - return OwnedObject::consuming(Require( - PyObject_GetAttr( - reinterpret_cast(this->p), - name - ), - name - )); - } - - template - inline OwnedObject PyObjectPointer::PyCall(const BorrowedObject& arg) const - { - return this->PyCall(arg.borrow()); - } - - template - inline OwnedObject PyObjectPointer::PyCall(PyGreenlet* arg) const - { - return this->PyCall(reinterpret_cast(arg)); - } - - template - inline OwnedObject PyObjectPointer::PyCall(PyObject* arg) const - { - assert(this->p); - return OwnedObject::consuming(PyObject_CallFunctionObjArgs(this->p, arg, NULL)); - } - - template - inline OwnedObject PyObjectPointer::PyCall(const BorrowedObject args, - const BorrowedObject kwargs) const - { - assert(this->p); - return OwnedObject::consuming(PyObject_Call(this->p, args, kwargs)); - } - - template - inline OwnedObject PyObjectPointer::PyCall(const OwnedObject& args, - const OwnedObject& kwargs) const - { - assert(this->p); - return OwnedObject::consuming(PyObject_Call(this->p, args.borrow(), kwargs.borrow())); - } - - inline void - ListChecker(void * p) - { - if (!p) { - return; - } - if (!PyList_Check(p)) { - throw TypeError("Expected a list"); - } - } - - class OwnedList : public OwnedReference - { - private: - G_NO_ASSIGNMENT_OF_CLS(OwnedList); - public: - // TODO: Would like to use move. - explicit OwnedList(const OwnedObject& other) - : OwnedReference(other) - { - } - - OwnedList& operator=(const OwnedObject& other) - { - if (other && PyList_Check(other.p)) { - // Valid list. Own a new reference to it, discard the - // reference to what we did own. - PyObject* new_ptr = other.p; - Py_INCREF(new_ptr); - Py_XDECREF(this->p); - this->p = new_ptr; - } - else { - // Either the other object was NULL (an error) or it - // wasn't a list. Either way, we're now invalidated. 
- Py_XDECREF(this->p); - this->p = nullptr; - } - return *this; - } - - inline bool empty() const - { - return PyList_GET_SIZE(p) == 0; - } - - inline Py_ssize_t size() const - { - return PyList_GET_SIZE(p); - } - - inline BorrowedObject at(const Py_ssize_t index) const - { - return PyList_GET_ITEM(p, index); - } - - inline void clear() - { - PyList_SetSlice(p, 0, PyList_GET_SIZE(p), NULL); - } - }; - - // Use this to represent the module object used at module init - // time. - // This could either be a borrowed (Py2) or new (Py3) reference; - // either way, we don't want to do any memory management - // on it here, Python itself will handle that. - // XXX: Actually, that's not quite right. On Python 3, if an - // exception occurs before we return to the interpreter, this will - // leak; but all previous versions also had that problem. - class CreatedModule : public PyObjectPointer<> - { - private: - G_NO_COPIES_OF_CLS(CreatedModule); - public: - CreatedModule(PyModuleDef& mod_def) : PyObjectPointer<>( - Require(PyModule_Create(&mod_def))) - { - } - - // PyAddObject(): Add a reference to the object to the module. - // On return, the reference count of the object is unchanged. - // - // The docs warn that PyModule_AddObject only steals the - // reference on success, so if it fails after we've incref'd - // or allocated, we're responsible for the decref. - void PyAddObject(const char* name, const long new_bool) - { - OwnedObject p = OwnedObject::consuming(Require(PyBool_FromLong(new_bool))); - this->PyAddObject(name, p); - } - - void PyAddObject(const char* name, const OwnedObject& new_object) - { - // The caller already owns a reference they will decref - // when their variable goes out of scope, we still need to - // incref/decref. - this->PyAddObject(name, new_object.borrow()); - } - - void PyAddObject(const char* name, const ImmortalObject& new_object) - { - this->PyAddObject(name, new_object.borrow()); - } - - void PyAddObject(const char* name, PyTypeObject& type) - { - this->PyAddObject(name, reinterpret_cast(&type)); - } - - void PyAddObject(const char* name, PyObject* new_object) - { - Py_INCREF(new_object); - try { - Require(PyModule_AddObject(this->p, name, new_object)); - } - catch (const PyErrOccurred&) { - Py_DECREF(p); - throw; - } - } - }; - - class PyErrFetchParam : public PyObjectPointer<> - { - // Not an owned object, because we can't be initialized with - // one, and we only sometimes acquire ownership. - private: - G_NO_COPIES_OF_CLS(PyErrFetchParam); - public: - // To allow declaring these and passing them to - // PyErr_Fetch we implement the empty constructor, - // and the address operator. - PyErrFetchParam() : PyObjectPointer<>(nullptr) - { - } - - PyObject** operator&() - { - return &this->p; - } - - // This allows us to pass one directly without the &, - // BUT it has higher precedence than the bool operator - // if it's not explicit. - operator PyObject**() - { - return &this->p; - } - - // We don't want to be able to pass these to Py_DECREF and - // such so we don't have the implicit PyObject* conversion. - - inline PyObject* relinquish_ownership() - { - PyObject* result = this->p; - this->p = nullptr; - return result; - } - - ~PyErrFetchParam() - { - Py_XDECREF(p); - } - }; - - class OwnedErrPiece : public OwnedObject - { - private: - - public: - // Unlike OwnedObject, this increments the refcount. 
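``PyAddObject`` above guards a genuine CPython sharp edge: ``PyModule_AddObject`` steals the caller's reference only on success. In plain C-API terms the safe pattern looks like this (hypothetical ``add_flag`` helper):

#define PY_SSIZE_T_CLEAN
#include <Python.h>

static int add_flag(PyObject* module, const char* name, long value)
{
    PyObject* flag = PyBool_FromLong(value); // new reference
    if (!flag) {
        return -1;
    }
    if (PyModule_AddObject(module, name, flag) < 0) {
        Py_DECREF(flag); // NOT stolen on failure; we must release it
        return -1;
    }
    return 0; // stolen on success: the module owns it now
}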
- OwnedErrPiece(PyObject* p=nullptr) : OwnedObject(p) - { - this->acquire(); - } - - PyObject** operator&() - { - return &this->p; - } - - inline operator PyObject*() const - { - return this->p; - } - - operator PyTypeObject*() const - { - return reinterpret_cast(this->p); - } - }; - - class PyErrPieces - { - private: - OwnedErrPiece type; - OwnedErrPiece instance; - OwnedErrPiece traceback; - bool restored; - public: - // Takes new references; if we're destroyed before - // restoring the error, we drop the references. - PyErrPieces(PyObject* t, PyObject* v, PyObject* tb) : - type(t), - instance(v), - traceback(tb), - restored(0) - { - this->normalize(); - } - - PyErrPieces() : - restored(0) - { - // PyErr_Fetch transfers ownership to us, so - // we don't actually need to INCREF; but we *do* - // need to DECREF if we're not restored. - PyErrFetchParam t, v, tb; - PyErr_Fetch(&t, &v, &tb); - type.steal(t.relinquish_ownership()); - instance.steal(v.relinquish_ownership()); - traceback.steal(tb.relinquish_ownership()); - } - - void PyErrRestore() - { - // can only do this once - assert(!this->restored); - this->restored = true; - PyErr_Restore( - this->type.relinquish_ownership(), - this->instance.relinquish_ownership(), - this->traceback.relinquish_ownership()); - assert(!this->type && !this->instance && !this->traceback); - } - - private: - void normalize() - { - // First, check the traceback argument, replacing None, - // with NULL - if (traceback.is_None()) { - traceback = nullptr; - } - - if (traceback && !PyTraceBack_Check(traceback.borrow())) { - throw PyErrOccurred(PyExc_TypeError, - "throw() third argument must be a traceback object"); - } - - if (PyExceptionClass_Check(type)) { - // If we just had a type, we'll now have a type and - // instance. - // The type's refcount will have gone up by one - // because of the instance and the instance will have - // a refcount of one. Either way, we owned, and still - // do own, exactly one reference. - PyErr_NormalizeException(&type, &instance, &traceback); - - } - else if (PyExceptionInstance_Check(type)) { - /* Raising an instance --- usually that means an - object that is a subclass of BaseException, but on - Python 2, that can also mean an arbitrary old-style - object. The value should be a dummy. */ - if (instance && !instance.is_None()) { - throw PyErrOccurred( - PyExc_TypeError, - "instance exception may not have a separate value"); - } - /* Normalize to raise , */ - this->instance = this->type; - this->type = PyExceptionInstance_Class(instance.borrow()); - - /* - It would be tempting to do this: - - Py_ssize_t type_count = Py_REFCNT(Py_TYPE(instance.borrow())); - this->type = PyExceptionInstance_Class(instance.borrow()); - assert(this->type.REFCNT() == type_count + 1); - - But that doesn't work on Python 2 in the case of - old-style instances: The result of Py_TYPE is going to - be the global shared that all - old-style classes have, while the return of Instance_Class() - will be the Python-level class object. The two are unrelated. - */ - } - else { - /* Not something you can raise. throw() fails. */ - PyErr_Format(PyExc_TypeError, - "exceptions must be classes, or instances, not %s", - Py_TYPE(type.borrow())->tp_name); - throw PyErrOccurred(); - } - } - }; - - // PyArg_Parse's O argument returns a borrowed reference. 
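``PyErrPieces`` packages the fetch/restore dance that the C API otherwise leaves to the caller. A minimal sketch of what it automates; assumes Python 3.9+ for ``PyObject_CallNoArgs``, and note that ``PyErr_Fetch``/``PyErr_Restore`` are soft-deprecated in 3.12 in favor of ``PyErr_GetRaisedException``:

#define PY_SSIZE_T_CLEAN
#include <Python.h>

// Run a callback while an exception is pending, then re-raise it.
static void call_with_error_saved(PyObject* callback)
{
    PyObject *type, *value, *traceback;
    PyErr_Fetch(&type, &value, &traceback); // clears the indicator; we own the pieces
    PyObject* result = PyObject_CallNoArgs(callback);
    if (!result) {
        PyErr_Clear(); // drop the callback's error; the saved one wins
    }
    Py_XDECREF(result);
    PyErr_Restore(type, value, traceback); // hands our references back
}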
- class PyArgParseParam : public BorrowedObject - { - private: - G_NO_COPIES_OF_CLS(PyArgParseParam); - public: - explicit PyArgParseParam(PyObject* p=nullptr) : BorrowedObject(p) - { - } - - inline PyObject** operator&() - { - return &this->p; - } - }; - -};}; - -#endif diff --git a/myenv/Lib/site-packages/greenlet/greenlet_slp_switch.hpp b/myenv/Lib/site-packages/greenlet/greenlet_slp_switch.hpp deleted file mode 100644 index bd4b7ae..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_slp_switch.hpp +++ /dev/null @@ -1,99 +0,0 @@ -#ifndef GREENLET_SLP_SWITCH_HPP -#define GREENLET_SLP_SWITCH_HPP - -#include "greenlet_compiler_compat.hpp" -#include "greenlet_refs.hpp" - -/* - * the following macros are spliced into the OS/compiler - * specific code, in order to simplify maintenance. - */ -// We can save about 10% of the time it takes to switch greenlets if -// we thread the thread state through the slp_save_state() and the -// following slp_restore_state() calls from -// slp_switch()->g_switchstack() (which already needs to access it). -// -// However: -// -// that requires changing the prototypes and implementations of the -// switching functions. If we just change the prototype of -// slp_switch() to accept the argument and update the macros, without -// changing the implementation of slp_switch(), we get crashes on -// 64-bit Linux and 32-bit x86 (for reasons that aren't 100% clear); -// on the other hand, 64-bit macOS seems to be fine. Also, 64-bit -// windows is an issue because slp_switch is written fully in assembly -// and currently ignores its argument so some code would have to be -// adjusted there to pass the argument on to the -// ``slp_save_state_asm()`` function (but interestingly, because of -// the calling convention, the extra argument is just ignored and -// things function fine, albeit slower, if we just modify -// ``slp_save_state_asm()`` to fetch the pointer to pass to the -// macro.) -// -// Our compromise is to use a *global*, untracked, weak pointer -// to the necessary thread state during the process of switching only. -// This is safe because we're protected by the GIL, and if we're -// running this code, the thread isn't exiting. This also nets us a -// 10-12% speed improvement. - -static greenlet::Greenlet* volatile switching_thread_state = nullptr; - - -extern "C" { -static int GREENLET_NOINLINE(slp_save_state_trampoline)(char* stackref); -static void GREENLET_NOINLINE(slp_restore_state_trampoline)(); -} - - -#define SLP_SAVE_STATE(stackref, stsizediff) \ -do { \ - assert(switching_thread_state); \ - stackref += STACK_MAGIC; \ - if (slp_save_state_trampoline((char*)stackref)) \ - return -1; \ - if (!switching_thread_state->active()) \ - return 1; \ - stsizediff = switching_thread_state->stack_start() - (char*)stackref; \ -} while (0) - -#define SLP_RESTORE_STATE() slp_restore_state_trampoline() - -#define SLP_EVAL -extern "C" { -#define slp_switch GREENLET_NOINLINE(slp_switch) -#include "slp_platformselect.h" -} -#undef slp_switch - -#ifndef STACK_MAGIC -# error \ - "greenlet needs to be ported to this platform, or taught how to detect your compiler properly." -#endif /* !STACK_MAGIC */ - - - -#ifdef EXTERNAL_ASM -/* CCP addition: Make these functions, to be called from assembler. - * The token include file for the given platform should enable the - * EXTERNAL_ASM define so that this is included.
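The GIL-protected global described in that comment can be modeled in a few lines. Everything below is an illustrative stand-in for greenlet's internals, with the assembly routine reduced to a stub:

struct SwitchState { char* stack_start; };

// Published only while a switch is in flight; the GIL serializes access.
static SwitchState* volatile g_switching_state = nullptr;

static int fake_slp_switch() // stand-in for the assembly routine
{
    // The real routine calls back into SLP_SAVE_STATE / SLP_RESTORE_STATE,
    // which read the global instead of receiving a parameter.
    return g_switching_state ? 0 : -1;
}

static int do_switch(SwitchState* state)
{
    g_switching_state = state;   // park the state for the trampolines
    int rc = fake_slp_switch();
    g_switching_state = nullptr; // never leave it dangling
    return rc;
}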
- */ -extern "C" { -intptr_t -slp_save_state_asm(intptr_t* ref) -{ - intptr_t diff; - SLP_SAVE_STATE(ref, diff); - return diff; -} - -void -slp_restore_state_asm(void) -{ - SLP_RESTORE_STATE(); -} - -extern int slp_switch(void); -}; -#endif - -#endif diff --git a/myenv/Lib/site-packages/greenlet/greenlet_thread_support.hpp b/myenv/Lib/site-packages/greenlet/greenlet_thread_support.hpp deleted file mode 100644 index 3ded7d2..0000000 --- a/myenv/Lib/site-packages/greenlet/greenlet_thread_support.hpp +++ /dev/null @@ -1,31 +0,0 @@ -#ifndef GREENLET_THREAD_SUPPORT_HPP -#define GREENLET_THREAD_SUPPORT_HPP - -/** - * Defines various utility functions to help greenlet integrate well - * with threads. This used to be needed when we supported Python - * 2.7 on Windows, which used a very old compiler. We wrote an - * alternative implementation using Python APIs and POSIX or Windows - * APIs, but that's no longer needed. So this file is a shadow of its - * former self --- but may be needed in the future. - */ - -#include -#include -#include - -#include "greenlet_compiler_compat.hpp" - -namespace greenlet { - typedef std::mutex Mutex; - typedef std::lock_guard LockGuard; - class LockInitError : public std::runtime_error - { - public: - LockInitError(const char* what) : std::runtime_error(what) - {}; - }; -}; - - -#endif /* GREENLET_THREAD_SUPPORT_HPP */ diff --git a/myenv/Lib/site-packages/greenlet/platform/__init__.py b/myenv/Lib/site-packages/greenlet/platform/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/greenlet/platform/setup_switch_x64_masm.cmd b/myenv/Lib/site-packages/greenlet/platform/setup_switch_x64_masm.cmd deleted file mode 100644 index 038ced2..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/setup_switch_x64_masm.cmd +++ /dev/null @@ -1,2 +0,0 @@ -call "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" amd64 -ml64 /nologo /c /Fo switch_x64_masm.obj switch_x64_masm.asm diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_aarch64_gcc.h b/myenv/Lib/site-packages/greenlet/platform/switch_aarch64_gcc.h deleted file mode 100644 index 058617c..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_aarch64_gcc.h +++ /dev/null @@ -1,124 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 07-Sep-16 Add clang support using x register naming. Fredrik Fornwall - * 13-Apr-13 Add support for strange GCC caller-save decisions - * 08-Apr-13 File creation. Michael Matz - * - * NOTES - * - * Simply save all callee saved registers - * - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 -#define REGS_TO_SAVE "x19", "x20", "x21", "x22", "x23", "x24", "x25", "x26", \ - "x27", "x28", "x30" /* aka lr */, \ - "v8", "v9", "v10", "v11", \ - "v12", "v13", "v14", "v15" - -/* - * Recall: - asm asm-qualifiers ( AssemblerTemplate - : OutputOperands - [ : InputOperands - [ : Clobbers ] ]) - - or (if asm-qualifiers contains 'goto') - - asm asm-qualifiers ( AssemblerTemplate - : OutputOperands - : InputOperands - : Clobbers - : GotoLabels) - - and OutputOperands are - - [ [asmSymbolicName] ] constraint (cvariablename) - - When a name is given, refer to it as ``%[the name]``. - When not given, ``%i`` where ``i`` is the zero-based index. - - constraints starting with ``=`` means only writing; ``+`` means - reading and writing. - - This is followed by ``r`` (must be register) or ``m`` (must be memory) - and these can be combined. 
- - The ``cvariablename`` is actually an lvalue expression. - - In AArch64, there are 31 general purpose registers. If named X0... they are - 64-bit. If named W0... they are the bottom 32 bits of the - corresponding 64 bit register. - - XZR and WZR are hardcoded to 0, and ignore writes. - - Arguments are in X0..X7. C++ uses X0 for ``this``. X0 holds simple return - values (?) - - Whenever a W register is written, the top half of the X register is zeroed. - */ - -static int -slp_switch(void) -{ - int err; - void *fp; - /* Windows uses a 32-bit long on a 64-bit platform, unlike the rest of - the world, and in theory we can be compiled with GCC/llvm on 64-bit - Windows. So we need a fixed-width type. - */ - int64_t *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("str x29, %0" : "=m"(fp) : : ); - __asm__ ("mov %0, sp" : "=r" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "add sp,sp,%0\n" - "add x29,x29,%0\n" - : - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - /* SLP_SAVE_STATE macro contains some return statements - (of -1 and 1). It falls through only when - the return value of slp_save_state() is zero, which - is placed in x0. - In that case we (slp_switch) also want to return zero - (also in x0 of course). - Now, some GCC versions (seen with 4.8) think it's a - good idea to save/restore x0 around the call to - slp_restore_state(), instead of simply zeroing it - at the return below. But slp_restore_state - writes random values to the stack slot used for this - save/restore (from when it once was saved above in - SLP_SAVE_STATE, when it was still uninitialized), so - "restoring" that precious zero actually makes us - return random values. There are some ways to make - GCC not use that zero value in the normal return path - (e.g. making err volatile, but that costs a little - stack space), and the simplest is to call a function - that returns an unknown value (which happens to be zero), - so the saved/restored value is unused. - - Thus, this line stores a 0 into the ``err`` variable - (which must be held in a register for this instruction, - of course). The ``w`` qualifier causes the instruction - to use W0 instead of X0, otherwise we get a warning - about a value size mismatch (because err is an int, - and aarch64 platforms are LP64: 32-bit int, 64 bit long - and pointer).
- */ - __asm__ volatile ("mov %w0, #0" : "=r" (err)); - } - __asm__ volatile ("ldr x29, %0" : : "m" (fp) :); - __asm__ volatile ("" : : : REGS_TO_SAVE); - return err; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_alpha_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_alpha_unix.h deleted file mode 100644 index 7e07abf..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_alpha_unix.h +++ /dev/null @@ -1,30 +0,0 @@ -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "$9", "$10", "$11", "$12", "$13", "$14", "$15", \ - "$f2", "$f3", "$f4", "$f5", "$f6", "$f7", "$f8", "$f9" - -static int -slp_switch(void) -{ - int ret; - long *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("mov $30, %0" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "addq $30, %0, $30\n\t" - : /* no outputs */ - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("mov $31, %0" : "=r" (ret) : ); - return ret; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_amd64_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_amd64_unix.h deleted file mode 100644 index d470110..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_amd64_unix.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 3-May-13 Ralf Schmitt - * Add support for strange GCC caller-save decisions - * (ported from switch_aarch64_gcc.h) - * 18-Aug-11 Alexey Borzenkov - * Correctly save rbp, csr and cw - * 01-Apr-04 Hye-Shik Chang - * Ported from i386 to amd64. - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for spark - * 31-Avr-02 Armin Rigo - * Added ebx, esi and edi register-saves. - * 01-Mar-02 Samual M. Rushing - * Ported from i386. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -/* #define STACK_MAGIC 3 */ -/* the above works fine with gcc 2.96, but 2.95.3 wants this */ -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "r12", "r13", "r14", "r15" - -static int -slp_switch(void) -{ - int err; - void* rbp; - void* rbx; - unsigned int csr; - unsigned short cw; - /* This used to be declared 'register', but that does nothing in - modern compilers and is explicitly forbidden in some new - standards. 
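Two GCC/Clang extended-asm idioms recur throughout these platform headers and are easier to see in isolation; this sketch only compiles when targeting AArch64. It shows the ``%w`` operand modifier that selects the 32-bit view of a register, and an empty template whose only effect is its clobber list:

// %w0 names the 32-bit (W) half of the register holding operand 0.
static int add_one(int x)
{
    int out;
    __asm__ ("add %w0, %w1, #1" : "=r"(out) : "r"(x));
    return out;
}

// No instructions at all: the clobber list alone forces the compiler
// to spill and reload these callee-saved registers around this point.
static void force_spill(void)
{
    __asm__ volatile ("" : : : "x19", "x20", "x21", "x22");
}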
*/ - long *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("fstcw %0" : "=m" (cw)); - __asm__ volatile ("stmxcsr %0" : "=m" (csr)); - __asm__ volatile ("movq %%rbp, %0" : "=m" (rbp)); - __asm__ volatile ("movq %%rbx, %0" : "=m" (rbx)); - __asm__ ("movq %%rsp, %0" : "=g" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "addq %0, %%rsp\n" - "addq %0, %%rbp\n" - : - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - __asm__ volatile ("xorq %%rax, %%rax" : "=a" (err)); - } - __asm__ volatile ("movq %0, %%rbx" : : "m" (rbx)); - __asm__ volatile ("movq %0, %%rbp" : : "m" (rbp)); - __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); - __asm__ volatile ("fldcw %0" : : "m" (cw)); - __asm__ volatile ("" : : : REGS_TO_SAVE); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_arm32_gcc.h b/myenv/Lib/site-packages/greenlet/platform/switch_arm32_gcc.h deleted file mode 100644 index 655003a..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_arm32_gcc.h +++ /dev/null @@ -1,79 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 14-Aug-06 File creation. Ported from Arm Thumb. Sylvain Baro - * 3-Sep-06 Commented out saving of r1-r3 (r4 already commented out) as I - * read that these do not need to be saved. Also added notes and - * errors related to the frame pointer. Richard Tew. - * - * NOTES - * - * It is not possible to detect if fp is used or not, so the supplied - * switch function needs to support it, so that you can remove it if - * it does not apply to you. - * - * POSSIBLE ERRORS - * - * "fp cannot be used in asm here" - * - * - Try commenting out "fp" in REGS_TO_SAVE. 
- * - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 -#define REG_SP "sp" -#define REG_SPSP "sp,sp" -#ifdef __thumb__ -#define REG_FP "r7" -#define REG_FPFP "r7,r7" -#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r9", "r10", "r11", "lr" -#else -#define REG_FP "fp" -#define REG_FPFP "fp,fp" -#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r8", "r9", "r10", "lr" -#endif -#if defined(__SOFTFP__) -#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL -#elif defined(__VFP_FP__) -#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ - "d12", "d13", "d14", "d15" -#elif defined(__MAVERICK__) -#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "mvf4", "mvf5", "mvf6", "mvf7", \ - "mvf8", "mvf9", "mvf10", "mvf11", \ - "mvf12", "mvf13", "mvf14", "mvf15" -#else -#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "f4", "f5", "f6", "f7" -#endif - -static int -#ifdef __GNUC__ -__attribute__((optimize("no-omit-frame-pointer"))) -#endif -slp_switch(void) -{ - void *fp; - int *stackref, stsizediff; - int result; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("mov r0," REG_FP "\n\tstr r0,%0" : "=m" (fp) : : "r0"); - __asm__ ("mov %0," REG_SP : "=r" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "add " REG_SPSP ",%0\n" - "add " REG_FPFP ",%0\n" - : - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("ldr r0,%1\n\tmov " REG_FP ",r0\n\tmov %0, #0" : "=r" (result) : "m" (fp) : "r0"); - __asm__ volatile ("" : : : REGS_TO_SAVE); - return result; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_arm32_ios.h b/myenv/Lib/site-packages/greenlet/platform/switch_arm32_ios.h deleted file mode 100644 index 9e640e1..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_arm32_ios.h +++ /dev/null @@ -1,67 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 31-May-15 iOS support. Ported from arm32. Proton - * - * NOTES - * - * It is not possible to detect if fp is used or not, so the supplied - * switch function needs to support it, so that you can remove it if - * it does not apply to you. - * - * POSSIBLE ERRORS - * - * "fp cannot be used in asm here" - * - * - Try commenting out "fp" in REGS_TO_SAVE. 
- * - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 0 -#define REG_SP "sp" -#define REG_SPSP "sp,sp" -#define REG_FP "r7" -#define REG_FPFP "r7,r7" -#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r8", "r10", "r11", "lr" -#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "d8", "d9", "d10", "d11", \ - "d12", "d13", "d14", "d15" - -static int -#ifdef __GNUC__ -__attribute__((optimize("no-omit-frame-pointer"))) -#endif -slp_switch(void) -{ - void *fp; - int *stackref, stsizediff, result; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("str " REG_FP ",%0" : "=m" (fp)); - __asm__ ("mov %0," REG_SP : "=r" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "add " REG_SPSP ",%0\n" - "add " REG_FPFP ",%0\n" - : - : "r" (stsizediff) - : REGS_TO_SAVE /* Clobber registers, force compiler to - * recalculate address of void *fp from REG_SP or REG_FP */ - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ( - "ldr " REG_FP ", %1\n\t" - "mov %0, #0" - : "=r" (result) - : "m" (fp) - : REGS_TO_SAVE /* Force compiler to restore saved registers after this */ - ); - return result; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_arm64_masm.asm b/myenv/Lib/site-packages/greenlet/platform/switch_arm64_masm.asm deleted file mode 100644 index 29f9c22..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_arm64_masm.asm +++ /dev/null @@ -1,53 +0,0 @@ - AREA switch_arm64_masm, CODE, READONLY; - GLOBAL slp_switch [FUNC] - EXTERN slp_save_state_asm - EXTERN slp_restore_state_asm - -slp_switch - ; push callee saved registers to stack - stp x19, x20, [sp, #-16]! - stp x21, x22, [sp, #-16]! - stp x23, x24, [sp, #-16]! - stp x25, x26, [sp, #-16]! - stp x27, x28, [sp, #-16]! - stp x29, x30, [sp, #-16]! - stp d8, d9, [sp, #-16]! - stp d10, d11, [sp, #-16]! - stp d12, d13, [sp, #-16]! - stp d14, d15, [sp, #-16]! - - ; call slp_save_state_asm with stack pointer - mov x0, sp - bl slp_save_state_asm - - ; early return for return value of 1 and -1 - cmp x0, #-1 - b.eq RETURN - cmp x0, #1 - b.eq RETURN - - ; increment stack and frame pointer - add sp, sp, x0 - add x29, x29, x0 - - bl slp_restore_state_asm - - ; store return value for successful completion of routine - mov x0, #0 - -RETURN - ; pop registers from stack - ldp d14, d15, [sp], #16 - ldp d12, d13, [sp], #16 - ldp d10, d11, [sp], #16 - ldp d8, d9, [sp], #16 - ldp x29, x30, [sp], #16 - ldp x27, x28, [sp], #16 - ldp x25, x26, [sp], #16 - ldp x23, x24, [sp], #16 - ldp x21, x22, [sp], #16 - ldp x19, x20, [sp], #16 - - ret - - END diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_arm64_masm.obj b/myenv/Lib/site-packages/greenlet/platform/switch_arm64_masm.obj deleted file mode 100644 index f6f220e..0000000 Binary files a/myenv/Lib/site-packages/greenlet/platform/switch_arm64_masm.obj and /dev/null differ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_arm64_msvc.h b/myenv/Lib/site-packages/greenlet/platform/switch_arm64_msvc.h deleted file mode 100644 index 7ab7f45..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_arm64_msvc.h +++ /dev/null @@ -1,17 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 21-Oct-21 Niyas Sait - * First version to enable win/arm64 support. - */ - -#define STACK_REFPLUS 1 -#define STACK_MAGIC 0 - -/* Use the generic support for an external assembly language slp_switch function. */ -#define EXTERNAL_ASM - -#ifdef SLP_EVAL -/* This always uses the external masm assembly file. 
*/ -#endif \ No newline at end of file diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_csky_gcc.h b/myenv/Lib/site-packages/greenlet/platform/switch_csky_gcc.h deleted file mode 100644 index ac469d3..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_csky_gcc.h +++ /dev/null @@ -1,48 +0,0 @@ -#ifdef SLP_EVAL -#define STACK_MAGIC 0 -#define REG_FP "r8" -#ifdef __CSKYABIV2__ -#define REGS_TO_SAVE_GENERAL "r4", "r5", "r6", "r7", "r9", "r10", "r11", "r15",\ - "r16", "r17", "r18", "r19", "r20", "r21", "r22",\ - "r23", "r24", "r25" - -#if defined (__CSKY_HARD_FLOAT__) || (__CSKY_VDSP__) -#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL, "vr8", "vr9", "vr10", "vr11", "vr12",\ - "vr13", "vr14", "vr15" -#else -#define REGS_TO_SAVE REGS_TO_SAVE_GENERAL -#endif -#else -#define REGS_TO_SAVE "r9", "r10", "r11", "r12", "r13", "r15" -#endif - - -static int -#ifdef __GNUC__ -__attribute__((optimize("no-omit-frame-pointer"))) -#endif -slp_switch(void) -{ - int *stackref, stsizediff; - int result; - - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ ("mov %0, sp" : "=r" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "addu sp,%0\n" - "addu "REG_FP",%0\n" - : - : "r" (stsizediff) - ); - - SLP_RESTORE_STATE(); - } - __asm__ volatile ("movi %0, 0" : "=r" (result)); - __asm__ volatile ("" : : : REGS_TO_SAVE); - - return result; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_loongarch64_linux.h b/myenv/Lib/site-packages/greenlet/platform/switch_loongarch64_linux.h deleted file mode 100644 index 9eaf34e..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_loongarch64_linux.h +++ /dev/null @@ -1,31 +0,0 @@ -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "s0", "s1", "s2", "s3", "s4", "s5", \ - "s6", "s7", "s8", "fp", \ - "f24", "f25", "f26", "f27", "f28", "f29", "f30", "f31" - -static int -slp_switch(void) -{ - int ret; - long *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("move %0, $sp" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "add.d $sp, $sp, %0\n\t" - : /* no outputs */ - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("move %0, $zero" : "=r" (ret) : ); - return ret; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_m68k_gcc.h b/myenv/Lib/site-packages/greenlet/platform/switch_m68k_gcc.h deleted file mode 100644 index da761c2..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_m68k_gcc.h +++ /dev/null @@ -1,38 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 2014-01-06 Andreas Schwab - * File created. 
- */ - -#ifdef SLP_EVAL - -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "%d2", "%d3", "%d4", "%d5", "%d6", "%d7", \ - "%a2", "%a3", "%a4" - -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - void *fp, *a5; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("move.l %%fp, %0" : "=m"(fp)); - __asm__ volatile ("move.l %%a5, %0" : "=m"(a5)); - __asm__ ("move.l %%sp, %0" : "=r"(stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ("add.l %0, %%sp; add.l %0, %%fp" : : "r"(stsizediff)); - SLP_RESTORE_STATE(); - __asm__ volatile ("clr.l %0" : "=g" (err)); - } - __asm__ volatile ("move.l %0, %%a5" : : "m"(a5)); - __asm__ volatile ("move.l %0, %%fp" : : "m"(fp)); - __asm__ volatile ("" : : : REGS_TO_SAVE); - return err; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_mips_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_mips_unix.h deleted file mode 100644 index b9003e9..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_mips_unix.h +++ /dev/null @@ -1,64 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 20-Sep-14 Matt Madison - * Re-code the saving of the gp register for MIPS64. - * 05-Jan-08 Thiemo Seufer - * Ported from ppc. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "$16", "$17", "$18", "$19", "$20", "$21", "$22", \ - "$23", "$30" -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; -#ifdef __mips64 - uint64_t gpsave; -#endif - __asm__ __volatile__ ("" : : : REGS_TO_SAVE); -#ifdef __mips64 - __asm__ __volatile__ ("sd $28,%0" : "=m" (gpsave) : : ); -#endif - __asm__ ("move %0, $29" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ __volatile__ ( -#ifdef __mips64 - "daddu $29, %0\n" -#else - "addu $29, %0\n" -#endif - : /* no outputs */ - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } -#ifdef __mips64 - __asm__ __volatile__ ("ld $28,%0" : : "m" (gpsave) : ); -#endif - __asm__ __volatile__ ("" : : : REGS_TO_SAVE); - __asm__ __volatile__ ("move %0, $0" : "=r" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_ppc64_aix.h b/myenv/Lib/site-packages/greenlet/platform/switch_ppc64_aix.h deleted file mode 100644 index e7e0b87..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_ppc64_aix.h +++ /dev/null @@ -1,103 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 16-Oct-20 Jesse Gorzinski - * Copied from Linux PPC64 implementation - * 04-Sep-18 Alexey Borzenkov - * Workaround a gcc bug using manual save/restore of r30 - * 21-Mar-18 Tulio Magno Quites Machado Filho - * Added r30 to the list of saved registers in order to fully comply with - * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this - * register as a nonvolatile register used for local variables. - * 21-Mar-18 Laszlo Boszormenyi - * Save r2 (TOC pointer) manually. - * 10-Dec-13 Ulrich Weigand - * Support ELFv2 ABI. Save float/vector registers. - * 09-Mar-12 Michael Ellerman - * 64-bit implementation, copied from 32-bit. - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'r31' from the register-saved. !!!! WARNING !!!! 
- * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 14-Jan-04 Bob Ippolito - * added cr2-cr4 to the registers to be saved. - * Open questions: Should we save FP registers? - * What about vector registers? - * Differences between darwin and unix? - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 04-Oct-02 Gustavo Niemeyer - * Ported from MacOS version. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 29-Jun-02 Christian Tismer - * Added register 13-29, 31 saves. The same way as - * Armin Rigo did for the x86_unix version. - * This seems to be now fully functional! - * 04-Mar-02 Hye-Shik Chang - * Ported from i386. - * 31-Jul-12 Trevor Bowen - * Changed memory constraints to register only. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 6 - -#if defined(__ALTIVEC__) -#define ALTIVEC_REGS \ - "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ - "v28", "v29", "v30", "v31", -#else -#define ALTIVEC_REGS -#endif - -#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ - "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ - "r31", \ - "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ - "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ - "fr30", "fr31", \ - ALTIVEC_REGS \ - "cr2", "cr3", "cr4" - -static int -slp_switch(void) -{ - int err; - long *stackref, stsizediff; - void * toc; - void * r30; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("std 2, %0" : "=m" (toc)); - __asm__ volatile ("std 30, %0" : "=m" (r30)); - __asm__ ("mr %0, 1" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "mr 11, %0\n" - "add 1, 1, 11\n" - : /* no outputs */ - : "r" (stsizediff) - : "11" - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("ld 30, %0" : : "m" (r30)); - __asm__ volatile ("ld 2, %0" : : "m" (toc)); - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("li %0, 0" : "=r" (err)); - return err; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_ppc64_linux.h b/myenv/Lib/site-packages/greenlet/platform/switch_ppc64_linux.h deleted file mode 100644 index 3c324d0..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_ppc64_linux.h +++ /dev/null @@ -1,105 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 04-Sep-18 Alexey Borzenkov - * Workaround a gcc bug using manual save/restore of r30 - * 21-Mar-18 Tulio Magno Quites Machado Filho - * Added r30 to the list of saved registers in order to fully comply with - * both ppc64 ELFv1 ABI and the ppc64le ELFv2 ABI, that classify this - * register as a nonvolatile register used for local variables. - * 21-Mar-18 Laszlo Boszormenyi - * Save r2 (TOC pointer) manually. - * 10-Dec-13 Ulrich Weigand - * Support ELFv2 ABI. Save float/vector registers. - * 09-Mar-12 Michael Ellerman - * 64-bit implementation, copied from 32-bit. - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'r31' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! 
- * It is now only suitable as part of a dynamic library! - * 14-Jan-04 Bob Ippolito - * added cr2-cr4 to the registers to be saved. - * Open questions: Should we save FP registers? - * What about vector registers? - * Differences between darwin and unix? - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 04-Oct-02 Gustavo Niemeyer - * Ported from MacOS version. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 29-Jun-02 Christian Tismer - * Added register 13-29, 31 saves. The same way as - * Armin Rigo did for the x86_unix version. - * This seems to be now fully functional! - * 04-Mar-02 Hye-Shik Chang - * Ported from i386. - * 31-Jul-12 Trevor Bowen - * Changed memory constraints to register only. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#if _CALL_ELF == 2 -#define STACK_MAGIC 4 -#else -#define STACK_MAGIC 6 -#endif - -#if defined(__ALTIVEC__) -#define ALTIVEC_REGS \ - "v20", "v21", "v22", "v23", "v24", "v25", "v26", "v27", \ - "v28", "v29", "v30", "v31", -#else -#define ALTIVEC_REGS -#endif - -#define REGS_TO_SAVE "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ - "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ - "r31", \ - "fr14", "fr15", "fr16", "fr17", "fr18", "fr19", "fr20", "fr21", \ - "fr22", "fr23", "fr24", "fr25", "fr26", "fr27", "fr28", "fr29", \ - "fr30", "fr31", \ - ALTIVEC_REGS \ - "cr2", "cr3", "cr4" - -static int -slp_switch(void) -{ - int err; - long *stackref, stsizediff; - void * toc; - void * r30; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("std 2, %0" : "=m" (toc)); - __asm__ volatile ("std 30, %0" : "=m" (r30)); - __asm__ ("mr %0, 1" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "mr 11, %0\n" - "add 1, 1, 11\n" - : /* no outputs */ - : "r" (stsizediff) - : "11" - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("ld 30, %0" : : "m" (r30)); - __asm__ volatile ("ld 2, %0" : : "m" (toc)); - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("li %0, 0" : "=r" (err)); - return err; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_aix.h b/myenv/Lib/site-packages/greenlet/platform/switch_ppc_aix.h deleted file mode 100644 index 6d93c13..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_aix.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 07-Mar-11 Floris Bruynooghe - * Do not add stsizediff to general purpose - * register (GPR) 30 as this is a non-volatile and - * unused by the PowerOpen Environment, therefore - * this was modifying a user register instead of the - * frame pointer (which does not seem to exist). - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'r31' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 14-Jan-04 Bob Ippolito - * added cr2-cr4 to the registers to be saved. - * Open questions: Should we save FP registers? - * What about vector registers? - * Differences between darwin and unix? 
- * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 04-Oct-02 Gustavo Niemeyer - * Ported from MacOS version. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 29-Jun-02 Christian Tismer - * Added register 13-29, 31 saves. The same way as - * Armin Rigo did for the x86_unix version. - * This seems to be now fully functional! - * 04-Mar-02 Hye-Shik Chang - * Ported from i386. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 3 - -/* !!!!WARNING!!!! need to add "r31" in the next line if this header file - * is meant to be compiled non-dynamically! - */ -#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ - "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ - "cr2", "cr3", "cr4" -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ ("mr %0, 1" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "mr 11, %0\n" - "add 1, 1, 11\n" - : /* no outputs */ - : "r" (stsizediff) - : "11" - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("li %0, 0" : "=r" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_linux.h b/myenv/Lib/site-packages/greenlet/platform/switch_ppc_linux.h deleted file mode 100644 index e83ad70..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_linux.h +++ /dev/null @@ -1,84 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'r31' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 14-Jan-04 Bob Ippolito - * added cr2-cr4 to the registers to be saved. - * Open questions: Should we save FP registers? - * What about vector registers? - * Differences between darwin and unix? - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 04-Oct-02 Gustavo Niemeyer - * Ported from MacOS version. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 29-Jun-02 Christian Tismer - * Added register 13-29, 31 saves. The same way as - * Armin Rigo did for the x86_unix version. - * This seems to be now fully functional! - * 04-Mar-02 Hye-Shik Chang - * Ported from i386. - * 31-Jul-12 Trevor Bowen - * Changed memory constraints to register only. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 3 - -/* !!!!WARNING!!!! 
need to add "r31" in the next line if this header file - * is meant to be compiled non-dynamically! - */ -#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ - "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ - "cr2", "cr3", "cr4" -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ ("mr %0, 1" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "mr 11, %0\n" - "add 1, 1, 11\n" - "add 30, 30, 11\n" - : /* no outputs */ - : "r" (stsizediff) - : "11" - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("li %0, 0" : "=r" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_macosx.h b/myenv/Lib/site-packages/greenlet/platform/switch_ppc_macosx.h deleted file mode 100644 index bd414c6..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_macosx.h +++ /dev/null @@ -1,82 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'r31' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 14-Jan-04 Bob Ippolito - * added cr2-cr4 to the registers to be saved. - * Open questions: Should we save FP registers? - * What about vector registers? - * Differences between darwin and unix? - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 29-Jun-02 Christian Tismer - * Added register 13-29, 31 saves. The same way as - * Armin Rigo did for the x86_unix version. - * This seems to be now fully functional! - * 04-Mar-02 Hye-Shik Chang - * Ported from i386. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 3 - -/* !!!!WARNING!!!! need to add "r31" in the next line if this header file - * is meant to be compiled non-dynamically! - */ -#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ - "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ - "cr2", "cr3", "cr4" - -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ ("; asm block 2\n\tmr %0, r1" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "; asm block 3\n" - "\tmr r11, %0\n" - "\tadd r1, r1, r11\n" - "\tadd r30, r30, r11\n" - : /* no outputs */ - : "r" (stsizediff) - : "r11" - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("li %0, 0" : "=r" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. 
- * These features are highly experimental und not - * essential yet. - */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h deleted file mode 100644 index bb18808..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_ppc_unix.h +++ /dev/null @@ -1,82 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'r31' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 14-Jan-04 Bob Ippolito - * added cr2-cr4 to the registers to be saved. - * Open questions: Should we save FP registers? - * What about vector registers? - * Differences between darwin and unix? - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 04-Oct-02 Gustavo Niemeyer - * Ported from MacOS version. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 29-Jun-02 Christian Tismer - * Added register 13-29, 31 saves. The same way as - * Armin Rigo did for the x86_unix version. - * This seems to be now fully functional! - * 04-Mar-02 Hye-Shik Chang - * Ported from i386. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 3 - -/* !!!!WARNING!!!! need to add "r31" in the next line if this header file - * is meant to be compiled non-dynamically! - */ -#define REGS_TO_SAVE "r13", "r14", "r15", "r16", "r17", "r18", "r19", "r20", \ - "r21", "r22", "r23", "r24", "r25", "r26", "r27", "r28", "r29", \ - "cr2", "cr3", "cr4" -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ ("mr %0, 1" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "mr 11, %0\n" - "add 1, 1, 11\n" - "add 30, 30, 11\n" - : /* no outputs */ - : "r" (stsizediff) - : "11" - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("li %0, 0" : "=r" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. 
- */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h deleted file mode 100644 index e74f37a..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_riscv_unix.h +++ /dev/null @@ -1,36 +0,0 @@ -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "s1", "s2", "s3", "s4", "s5", \ - "s6", "s7", "s8", "s9", "s10", "s11", "fs0", "fs1", \ - "fs2", "fs3", "fs4", "fs5", "fs6", "fs7", "fs8", "fs9", \ - "fs10", "fs11" - -static int -slp_switch(void) -{ - long fp; - int ret; - long *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("mv %0, fp" : "=r" (fp) : ); - __asm__ volatile ("mv %0, sp" : "=r" (stackref) : ); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "add sp, sp, %0\n\t" - "add fp, fp, %0\n\t" - : /* no outputs */ - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("ld fp, %0" : : "m" (fp)); - __asm__ volatile ("mv %0, zero" : "=r" (ret) : ); - return ret; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_s390_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_s390_unix.h deleted file mode 100644 index 9199367..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_s390_unix.h +++ /dev/null @@ -1,87 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 25-Jan-12 Alexey Borzenkov - * Fixed Linux/S390 port to work correctly with - * different optimization options both on 31-bit - * and 64-bit. Thanks to Stefan Raabe for lots - * of testing. - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 06-Oct-02 Gustavo Niemeyer - * Ported to Linux/S390. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#ifdef __s390x__ -#define STACK_MAGIC 20 /* 20 * 8 = 160 bytes of function call area */ -#else -#define STACK_MAGIC 24 /* 24 * 4 = 96 bytes of function call area */ -#endif - -/* Technically, r11-r13 also need saving, but function prolog starts - with stm(g) and since there are so many saved registers already - it won't be optimized, resulting in all r6-r15 being saved */ -#define REGS_TO_SAVE "r6", "r7", "r8", "r9", "r10", "r14", \ - "f0", "f1", "f2", "f3", "f4", "f5", "f6", "f7", \ - "f8", "f9", "f10", "f11", "f12", "f13", "f14", "f15" - -static int -slp_switch(void) -{ - int ret; - long *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); -#ifdef __s390x__ - __asm__ volatile ("lgr %0, 15" : "=r" (stackref) : ); -#else - __asm__ volatile ("lr %0, 15" : "=r" (stackref) : ); -#endif - { - SLP_SAVE_STATE(stackref, stsizediff); -/* N.B. - r11 may be used as the frame pointer, and in that case it cannot be - clobbered and needs offsetting just like the stack pointer (but in cases - where frame pointer isn't used we might clobber it accidentally). What's - scary is that r11 is 2nd (and even 1st when GOT is used) callee saved - register that gcc would chose for surviving function calls. However, - since r6-r10 are clobbered above, their cost for reuse is reduced, so - gcc IRA will chose them over r11 (not seeing r11 is implicitly saved), - making it relatively safe to offset in all cases. 
:) */ - __asm__ volatile ( -#ifdef __s390x__ - "agr 15, %0\n\t" - "agr 11, %0" -#else - "ar 15, %0\n\t" - "ar 11, %0" -#endif - : /* no outputs */ - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("lhi %0, 0" : "=r" (ret) : ); - return ret; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_sh_gcc.h b/myenv/Lib/site-packages/greenlet/platform/switch_sh_gcc.h deleted file mode 100644 index 5ecc3b3..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_sh_gcc.h +++ /dev/null @@ -1,36 +0,0 @@ -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL -#define STACK_MAGIC 0 -#define REGS_TO_SAVE "r8", "r9", "r10", "r11", "r13", \ - "fr12", "fr13", "fr14", "fr15" - -// r12 Global context pointer, GP -// r14 Frame pointer, FP -// r15 Stack pointer, SP - -static int -slp_switch(void) -{ - int err; - void* fp; - int *stackref, stsizediff; - __asm__ volatile("" : : : REGS_TO_SAVE); - __asm__ volatile("mov.l r14, %0" : "=m"(fp) : :); - __asm__("mov r15, %0" : "=r"(stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile( - "add %0, r15\n" - "add %0, r14\n" - : /* no outputs */ - : "r"(stsizediff)); - SLP_RESTORE_STATE(); - __asm__ volatile("mov r0, %0" : "=r"(err) : :); - } - __asm__ volatile("mov.l %0, r14" : : "m"(fp) :); - __asm__ volatile("" : : : REGS_TO_SAVE); - return err; -} - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h b/myenv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h deleted file mode 100644 index 96990c3..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_sparc_sun_gcc.h +++ /dev/null @@ -1,92 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 16-May-15 Alexey Borzenkov - * Move stack spilling code inside save/restore functions - * 30-Aug-13 Floris Bruynooghe - Clean the register windows again before returning. - This does not clobber the PIC register as it leaves - the current window intact and is required for multi- - threaded code to work correctly. - * 08-Mar-11 Floris Bruynooghe - * No need to set return value register explicitly - * before the stack and framepointer are adjusted - * as none of the other registers are influenced by - * this. Also don't needlessly clean the windows - * ('ta %0" :: "i" (ST_CLEAN_WINDOWS)') as that - * clobbers the gcc PIC register (%l7). - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. 
- * 15-Sep-02 Gerd Woetzel - * added support for SunOS sparc with gcc - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - - -#define STACK_MAGIC 0 - - -#if defined(__sparcv9) -#define SLP_FLUSHW __asm__ volatile ("flushw") -#else -#define SLP_FLUSHW __asm__ volatile ("ta 3") /* ST_FLUSH_WINDOWS */ -#endif - -/* On sparc we need to spill register windows inside save/restore functions */ -#define SLP_BEFORE_SAVE_STATE() SLP_FLUSHW -#define SLP_BEFORE_RESTORE_STATE() SLP_FLUSHW - - -static int -slp_switch(void) -{ - int err; - int *stackref, stsizediff; - - /* Put current stack pointer into stackref. - * Register spilling is done in save/restore. - */ - __asm__ volatile ("mov %%sp, %0" : "=r" (stackref)); - - { - /* Thou shalt put SLP_SAVE_STATE into a local block */ - /* Copy the current stack onto the heap */ - SLP_SAVE_STATE(stackref, stsizediff); - - /* Increment stack and frame pointer by stsizediff */ - __asm__ volatile ( - "add %0, %%sp, %%sp\n\t" - "add %0, %%fp, %%fp" - : : "r" (stsizediff)); - - /* Copy new stack from it's save store on the heap */ - SLP_RESTORE_STATE(); - - __asm__ volatile ("mov %1, %0" : "=r" (err) : "i" (0)); - return err; - } -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. - */ diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_x32_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_x32_unix.h deleted file mode 100644 index 893369c..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_x32_unix.h +++ /dev/null @@ -1,63 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 17-Aug-12 Fantix King - * Ported from amd64. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 0 - -#define REGS_TO_SAVE "r12", "r13", "r14", "r15" - - -static int -slp_switch(void) -{ - void* ebp; - void* ebx; - unsigned int csr; - unsigned short cw; - int err; - int *stackref, stsizediff; - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("fstcw %0" : "=m" (cw)); - __asm__ volatile ("stmxcsr %0" : "=m" (csr)); - __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); - __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); - __asm__ ("movl %%esp, %0" : "=g" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "addl %0, %%esp\n" - "addl %0, %%ebp\n" - : - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - } - __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); - __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); - __asm__ volatile ("ldmxcsr %0" : : "m" (csr)); - __asm__ volatile ("fldcw %0" : : "m" (cw)); - __asm__ volatile ("" : : : REGS_TO_SAVE); - __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. 
- */
diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm b/myenv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm
deleted file mode 100644
index f5c72a2..0000000
--- a/myenv/Lib/site-packages/greenlet/platform/switch_x64_masm.asm
+++ /dev/null
@@ -1,111 +0,0 @@
-;
-; stack switching code for MASM on x64
-; Kristjan Valur Jonsson, sept 2005
-;
-
-
-;prototypes for our calls
-slp_save_state_asm PROTO
-slp_restore_state_asm PROTO
-
-
-pushxmm MACRO reg
-    sub rsp, 16
-    .allocstack 16
-    movaps [rsp], reg ; faster than movups, but we must be aligned
-    ; .savexmm128 reg, offset (don't know what offset is, no documentation)
-ENDM
-popxmm MACRO reg
-    movaps reg, [rsp] ; faster than movups, but we must be aligned
-    add rsp, 16
-ENDM
-
-pushreg MACRO reg
-    push reg
-    .pushreg reg
-ENDM
-popreg MACRO reg
-    pop reg
-ENDM
-
-
-.code
-slp_switch PROC FRAME
-    ;realign stack to 16 bytes after return address push, makes the following faster
-    sub rsp,8
-    .allocstack 8
-
-    pushxmm xmm15
-    pushxmm xmm14
-    pushxmm xmm13
-    pushxmm xmm12
-    pushxmm xmm11
-    pushxmm xmm10
-    pushxmm xmm9
-    pushxmm xmm8
-    pushxmm xmm7
-    pushxmm xmm6
-
-    pushreg r15
-    pushreg r14
-    pushreg r13
-    pushreg r12
-
-    pushreg rbp
-    pushreg rbx
-    pushreg rdi
-    pushreg rsi
-
-    sub rsp, 10h ;allocate the single function argument (must be multiple of 16)
-    .allocstack 10h
-.endprolog
-
-    lea rcx, [rsp+10h] ;load stack base that we are saving
-    call slp_save_state_asm ;pass stackpointer, return offset in eax
-    cmp rax, 1
-    je EXIT1
-    cmp rax, -1
-    je EXIT2
-    ;actual stack switch:
-    add rsp, rax
-    call slp_restore_state_asm
-    xor rax, rax ;return 0
-
-EXIT:
-
-    add rsp, 10h
-    popreg rsi
-    popreg rdi
-    popreg rbx
-    popreg rbp
-
-    popreg r12
-    popreg r13
-    popreg r14
-    popreg r15
-
-    popxmm xmm6
-    popxmm xmm7
-    popxmm xmm8
-    popxmm xmm9
-    popxmm xmm10
-    popxmm xmm11
-    popxmm xmm12
-    popxmm xmm13
-    popxmm xmm14
-    popxmm xmm15
-
-    add rsp, 8
-    ret
-
-EXIT1:
-    mov rax, 1
-    jmp EXIT
-
-EXIT2:
-    sar rax, 1
-    jmp EXIT
-
-slp_switch ENDP
-
-END
\ No newline at end of file
diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj b/myenv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj
deleted file mode 100644
index 64e3e6b..0000000
Binary files a/myenv/Lib/site-packages/greenlet/platform/switch_x64_masm.obj and /dev/null differ
diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h b/myenv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h
deleted file mode 100644
index 601ea56..0000000
--- a/myenv/Lib/site-packages/greenlet/platform/switch_x64_msvc.h
+++ /dev/null
@@ -1,60 +0,0 @@
-/*
- * this is the internal transfer function.
- *
- * HISTORY
- * 24-Nov-02 Christian Tismer
- * needed to add another magic constant to insure
- * that f in slp_eval_frame(PyFrameObject *f)
- * STACK_REFPLUS will probably be 1 in most cases.
- * gets included into the saved stack area.
- * 26-Sep-02 Christian Tismer
- * again as a result of virtualized stack access,
- * the compiler used less registers. Needed to
- * explicit mention registers in order to get them saved.
- * Thanks to Jeff Senn for pointing this out and help.
- * 17-Sep-02 Christian Tismer
- * after virtualizing stack save/restore, the
- * stack size shrunk a bit. Needed to introduce
- * an adjustment STACK_MAGIC per platform.
- * 15-Sep-02 Gerd Woetzel
- * slightly changed framework for sparc
- * 01-Mar-02 Christian Tismer
- * Initial final version after lots of iterations for i386.
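The pushxmm macro in the MASM file above makes room with sub rsp, 16 and then stores with movaps, which faults unless rsp is 16-byte aligned; that is also why the prolog opens with sub rsp, 8, restoring 16-byte alignment after the 8-byte return-address push. A small C sketch of the same alignment rule, assuming SSE intrinsics (the buffer names are illustrative, not from the diff):

/* Sketch only: aligned stores (movaps) demand 16-byte alignment,
 * unaligned stores (movups) do not. Keeping rsp aligned is what lets
 * the MASM code use the faster aligned form. */
#include <stdalign.h>
#include <stdio.h>
#include <xmmintrin.h>

int main(void)
{
    alignas(16) float aligned_buf[4];  /* safe for _mm_store_ps */
    float plain_buf[4];                /* use _mm_storeu_ps here */

    __m128 v = _mm_set1_ps(2.0f);
    _mm_store_ps(aligned_buf, v);      /* compiles to movaps */
    _mm_storeu_ps(plain_buf, v);       /* compiles to movups */

    printf("%.1f %.1f\n", (double)aligned_buf[0], (double)plain_buf[3]);
    return 0;
}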
- */ - -/* Avoid alloca redefined warning on mingw64 */ -#ifndef alloca -#define alloca _alloca -#endif - -#define STACK_REFPLUS 1 -#define STACK_MAGIC 0 - -/* Use the generic support for an external assembly language slp_switch function. */ -#define EXTERNAL_ASM - -#ifdef SLP_EVAL -/* This always uses the external masm assembly file. */ -#endif - -/* - * further self-processing support - */ - -/* we have IsBadReadPtr available, so we can peek at objects */ -/* -#define STACKLESS_SPY - -#ifdef IMPLEMENT_STACKLESSMODULE -#include "Windows.h" -#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) - -static int IS_ON_STACK(void*p) -{ - int stackref; - intptr_t stackbase = ((intptr_t)&stackref) & 0xfffff000; - return (intptr_t)p >= stackbase && (intptr_t)p < stackbase + 0x00100000; -} - -#endif -*/ \ No newline at end of file diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h b/myenv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h deleted file mode 100644 index 0f3a59f..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_x86_msvc.h +++ /dev/null @@ -1,326 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 26-Sep-02 Christian Tismer - * again as a result of virtualized stack access, - * the compiler used less registers. Needed to - * explicit mention registers in order to get them saved. - * Thanks to Jeff Senn for pointing this out and help. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for sparc - * 01-Mar-02 Christian Tismer - * Initial final version after lots of iterations for i386. - */ - -#define alloca _alloca - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -#define STACK_MAGIC 0 - -/* Some magic to quell warnings and keep slp_switch() from crashing when built - with VC90. Disable global optimizations, and the warning: frame pointer - register 'ebp' modified by inline assembly code. - - We used to just disable global optimizations ("g") but upstream stackless - Python, as well as stackman, turn off all optimizations. - -References: -https://github.com/stackless-dev/stackman/blob/dbc72fe5207a2055e658c819fdeab9731dee78b9/stackman/platforms/switch_x86_msvc.h -https://github.com/stackless-dev/stackless/blob/main-slp/Stackless/platf/switch_x86_msvc.h -*/ -#define WIN32_LEAN_AND_MEAN -#include - -#pragma optimize("", off) /* so that autos are stored on the stack */ -#pragma warning(disable:4731) -#pragma warning(disable:4733) /* disable warning about modifying FS[0] */ - -/** - * Most modern compilers and environments handle C++ exceptions without any - * special help from us. MSVC on 32-bit windows is an exception. There, C++ - * exceptions are dealt with using Windows' Structured Exception Handling - * (SEH). - * - * SEH is implemented as a singly linked list of nodes. The - * head of this list is stored in the Thread Information Block, which itself - * is pointed to from the FS register. It's the first field in the structure, - * or offset 0, so we can access it using assembly FS:[0], or the compiler - * intrinsics and field offset information from the headers (as we do below). 
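The FS:[0] read just described is easy to reproduce in isolation; the sketch below uses the same __readfsdword/FIELD_OFFSET pair as the code that follows, and only builds for 32-bit x86 MSVC targets:

/* Sketch only (x86 MSVC): print the current thread's SEH list head.
 * FIELD_OFFSET(NT_TIB, ExceptionList) is 0, so this reads FS:[0]. */
#include <windows.h>
#include <intrin.h>
#include <stdio.h>

int main(void)
{
    DWORD seh_head = __readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList));
    printf("SEH list head: %#lx\n", (unsigned long)seh_head);
    return 0;
}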
- * Somewhat unusually, the tail of the list doesn't have prev == NULL, it has
- * prev == 0xFFFFFFFF.
- *
- * SEH was designed for C, and traditionally uses the MSVC compiler
- * intrinsics __try{}/__except{}. It is also utilized for C++ exceptions by
- * MSVC; there, every throw of a C++ exception raises a SEH error with the
- * ExceptionCode 0xE06D7363; the SEH handler list is then traversed to
- * deal with the exception.
- *
- * If the SEH list is corrupt, then when a C++ exception is thrown the program
- * will abruptly exit with exit code 1. This does not use std::terminate(), so
- * std::set_terminate() is useless to debug this.
- *
- * The SEH list is closely tied to the call stack; entering a function that
- * uses __try{} or most C++ functions will push a new handler onto the front
- * of the list. Returning from the function will remove the handler. Saving
- * and restoring the head node of the SEH list (FS:[0]) per-greenlet is NOT
- * ENOUGH to make SEH or exceptions work.
- *
- * Stack switching breaks SEH because the call stack no longer necessarily
- * matches the SEH list. For example, given greenlet A that switches to
- * greenlet B, at the moment of entering greenlet B, we will have any SEH
- * handlers from greenlet A on the SEH list; greenlet B can then add its own
- * handlers to the SEH list. When greenlet B switches back to greenlet A,
- * greenlet B's handlers would still be on the SEH stack, but when switch()
- * returns control to greenlet A, we have replaced the contents of the stack
- * in memory, so all the addresses that greenlet B added to the SEH list are now
- * invalid: part of the call stack has been unwound, but the SEH list was out
- * of sync with the call stack. The net effect is that exception handling
- * stops working.
- *
- * Thus, when switching greenlets, we need to be sure that the SEH list
- * matches the effective call stack, "cutting out" any handlers that were
- * pushed by the greenlet that switched out and which are no longer valid.
- *
- * The easiest way to do this is to capture the SEH list at the time the main
- * greenlet for a thread is created, and, when initially starting a greenlet,
- * start a new SEH list for it, which contains nothing but the handler
- * established for the new greenlet itself, with the tail being the handlers
- * for the main greenlet. If we then save and restore the SEH per-greenlet,
- * they won't interfere with each other's SEH lists. (No greenlet can unwind
- * the call stack past the handlers established by the main greenlet).
- *
- * By observation, a new thread starts with three SEH handlers on the list. By
- * the time we get around to creating the main greenlet, though, there can be
- * many more, established by transient calls that lead to the creation of the
- * main greenlet. Therefore, 3 is a magic constant telling us when to perform
- * the initial slice.
- *
- * All of this can be debugged using a vectored exception handler, which
- * operates independently of the SEH handler list, and is called first.
- * Walking the SEH list at key points can also be helpful.
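That vectored-handler debugging technique can be tried standalone. The sketch below assumes only the documented Win32 calls AddVectoredExceptionHandler, RaiseException, and RemoveVectoredExceptionHandler; the handler name and the exception code are invented for illustration (a real MSVC C++ throw would arrive with code 0xE06D7363, as noted above):

/* Sketch only: a vectored handler runs before any frame-based SEH
 * handler. Here it observes a deliberately raised, continuable
 * exception and resumes execution. */
#include <windows.h>
#include <stdio.h>

#define DEMO_CODE 0x20000001 /* customer-defined, continuable code */

static LONG WINAPI demo_handler(PEXCEPTION_POINTERS info)
{
    if (info->ExceptionRecord->ExceptionCode == DEMO_CODE) {
        fprintf(stderr, "vectored handler saw code %#lx\n",
                (unsigned long)info->ExceptionRecord->ExceptionCode);
        return EXCEPTION_CONTINUE_EXECUTION; /* resume after the raise */
    }
    return EXCEPTION_CONTINUE_SEARCH; /* not ours; keep searching */
}

int main(void)
{
    PVOID cookie = AddVectoredExceptionHandler(1 /* call first */, demo_handler);
    RaiseException(DEMO_CODE, 0 /* continuable */, 0, NULL);
    puts("execution continued past the exception");
    RemoveVectoredExceptionHandler(cookie);
    return 0;
}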
- * - * References: - * https://en.wikipedia.org/wiki/Win32_Thread_Information_Block - * https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 - * https://docs.microsoft.com/en-us/cpp/cpp/try-except-statement?view=msvc-160 - * https://docs.microsoft.com/en-us/cpp/cpp/structured-exception-handling-c-cpp?view=msvc-160 - * https://docs.microsoft.com/en-us/windows/win32/debug/structured-exception-handling - * https://docs.microsoft.com/en-us/windows/win32/debug/using-a-vectored-exception-handler - * https://bytepointer.com/resources/pietrek_crash_course_depths_of_win32_seh.htm - */ -#define GREENLET_NEEDS_EXCEPTION_STATE_SAVED - - -typedef struct _GExceptionRegistration { - struct _GExceptionRegistration* prev; - void* handler_f; -} GExceptionRegistration; - -static void -slp_set_exception_state(const void *const seh_state) -{ - // Because the stack from from which we do this is ALSO a handler, and - // that one we want to keep, we need to relink the current SEH handler - // frame to point to this one, cutting out the middle men, as it were. - // - // Entering a try block doesn't change the SEH frame, but entering a - // function containing a try block does. - GExceptionRegistration* current_seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - current_seh_state->prev = (GExceptionRegistration*)seh_state; -} - - -static GExceptionRegistration* -x86_slp_get_third_oldest_handler() -{ - GExceptionRegistration* a = NULL; /* Closest to the top */ - GExceptionRegistration* b = NULL; /* second */ - GExceptionRegistration* c = NULL; - GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - a = b = c = seh_state; - - while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { - if ((void*)seh_state->prev < (void*)100) { - fprintf(stderr, "\tERROR: Broken SEH chain.\n"); - return NULL; - } - a = b; - b = c; - c = seh_state; - - seh_state = seh_state->prev; - } - return a ? a : (b ? b : c); -} - - -static void* -slp_get_exception_state() -{ - // XXX: There appear to be three SEH handlers on the stack already at the - // start of the thread. Is that a guarantee? Almost certainly not. Yet in - // all observed cases it has been three. This is consistent with - // faulthandler off or on, and optimizations off or on. It may not be - // consistent with other operating system versions, though: we only have - // CI on one or two versions (don't ask what there are). - // In theory we could capture the number of handlers on the chain when - // PyInit__greenlet is called: there are probably only the default - // handlers at that point (unless we're embedded and people have used - // __try/__except or a C++ handler)? - return x86_slp_get_third_oldest_handler(); -} - -static int -slp_switch(void) -{ - /* MASM syntax is typically reversed from other assemblers. - It is usually - */ - int *stackref, stsizediff; - /* store the structured exception state for this stack */ - DWORD seh_state = __readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - __asm mov stackref, esp; - /* modify EBX, ESI and EDI in order to get them preserved */ - __asm mov ebx, ebx; - __asm xchg esi, edi; - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm { - mov eax, stsizediff - add esp, eax - add ebp, eax - } - SLP_RESTORE_STATE(); - } - __writefsdword(FIELD_OFFSET(NT_TIB, ExceptionList), seh_state); - return 0; -} - -/* re-enable ebp warning and global optimizations. 
*/ -#pragma optimize("", on) -#pragma warning(default:4731) -#pragma warning(default:4733) /* disable warning about modifying FS[0] */ - - -#endif - -/* - * further self-processing support - */ - -/* we have IsBadReadPtr available, so we can peek at objects */ -#define STACKLESS_SPY - -#ifdef GREENLET_DEBUG - -#define CANNOT_READ_MEM(p, bytes) IsBadReadPtr(p, bytes) - -static int IS_ON_STACK(void*p) -{ - int stackref; - int stackbase = ((int)&stackref) & 0xfffff000; - return (int)p >= stackbase && (int)p < stackbase + 0x00100000; -} - -static void -x86_slp_show_seh_chain() -{ - GExceptionRegistration* seh_state = (GExceptionRegistration*)__readfsdword(FIELD_OFFSET(NT_TIB, ExceptionList)); - fprintf(stderr, "====== SEH Chain ======\n"); - while (seh_state && seh_state != (GExceptionRegistration*)0xFFFFFFFF) { - fprintf(stderr, "\tSEH_chain addr: %p handler: %p prev: %p\n", - seh_state, - seh_state->handler_f, seh_state->prev); - if ((void*)seh_state->prev < (void*)100) { - fprintf(stderr, "\tERROR: Broken chain.\n"); - break; - } - seh_state = seh_state->prev; - } - fprintf(stderr, "====== End SEH Chain ======\n"); - fflush(NULL); - return; -} - -//addVectoredExceptionHandler constants: -//CALL_FIRST means call this exception handler first; -//CALL_LAST means call this exception handler last -#define CALL_FIRST 1 -#define CALL_LAST 0 - -LONG WINAPI -GreenletVectorHandler(PEXCEPTION_POINTERS ExceptionInfo) -{ - // We get one of these for every C++ exception, with code - // E06D7363 - // This is a special value that means "C++ exception from MSVC" - // https://devblogs.microsoft.com/oldnewthing/20100730-00/?p=13273 - // - // Install in the module init function with: - // AddVectoredExceptionHandler(CALL_FIRST, GreenletVectorHandler); - PEXCEPTION_RECORD ExceptionRecord = ExceptionInfo->ExceptionRecord; - - fprintf(stderr, - "GOT VECTORED EXCEPTION:\n" - "\tExceptionCode : %p\n" - "\tExceptionFlags : %p\n" - "\tExceptionAddr : %p\n" - "\tNumberparams : %ld\n", - ExceptionRecord->ExceptionCode, - ExceptionRecord->ExceptionFlags, - ExceptionRecord->ExceptionAddress, - ExceptionRecord->NumberParameters - ); - if (ExceptionRecord->ExceptionFlags & 1) { - fprintf(stderr, "\t\tEH_NONCONTINUABLE\n" ); - } - if (ExceptionRecord->ExceptionFlags & 2) { - fprintf(stderr, "\t\tEH_UNWINDING\n" ); - } - if (ExceptionRecord->ExceptionFlags & 4) { - fprintf(stderr, "\t\tEH_EXIT_UNWIND\n" ); - } - if (ExceptionRecord->ExceptionFlags & 8) { - fprintf(stderr, "\t\tEH_STACK_INVALID\n" ); - } - if (ExceptionRecord->ExceptionFlags & 0x10) { - fprintf(stderr, "\t\tEH_NESTED_CALL\n" ); - } - if (ExceptionRecord->ExceptionFlags & 0x20) { - fprintf(stderr, "\t\tEH_TARGET_UNWIND\n" ); - } - if (ExceptionRecord->ExceptionFlags & 0x40) { - fprintf(stderr, "\t\tEH_COLLIDED_UNWIND\n" ); - } - fprintf(stderr, "\n"); - fflush(NULL); - for(DWORD i = 0; i < ExceptionRecord->NumberParameters; i++) { - fprintf(stderr, "\t\t\tParam %ld: %lX\n", i, ExceptionRecord->ExceptionInformation[i]); - } - - if (ExceptionRecord->NumberParameters == 3) { - fprintf(stderr, "\tAbout to traverse SEH chain\n"); - // C++ Exception records have 3 params. 
- x86_slp_show_seh_chain(); - } - - return EXCEPTION_CONTINUE_SEARCH; -} - - - - -#endif diff --git a/myenv/Lib/site-packages/greenlet/platform/switch_x86_unix.h b/myenv/Lib/site-packages/greenlet/platform/switch_x86_unix.h deleted file mode 100644 index 493fa6b..0000000 --- a/myenv/Lib/site-packages/greenlet/platform/switch_x86_unix.h +++ /dev/null @@ -1,105 +0,0 @@ -/* - * this is the internal transfer function. - * - * HISTORY - * 3-May-13 Ralf Schmitt - * Add support for strange GCC caller-save decisions - * (ported from switch_aarch64_gcc.h) - * 19-Aug-11 Alexey Borzenkov - * Correctly save ebp, ebx and cw - * 07-Sep-05 (py-dev mailing list discussion) - * removed 'ebx' from the register-saved. !!!! WARNING !!!! - * It means that this file can no longer be compiled statically! - * It is now only suitable as part of a dynamic library! - * 24-Nov-02 Christian Tismer - * needed to add another magic constant to insure - * that f in slp_eval_frame(PyFrameObject *f) - * STACK_REFPLUS will probably be 1 in most cases. - * gets included into the saved stack area. - * 17-Sep-02 Christian Tismer - * after virtualizing stack save/restore, the - * stack size shrunk a bit. Needed to introduce - * an adjustment STACK_MAGIC per platform. - * 15-Sep-02 Gerd Woetzel - * slightly changed framework for spark - * 31-Avr-02 Armin Rigo - * Added ebx, esi and edi register-saves. - * 01-Mar-02 Samual M. Rushing - * Ported from i386. - */ - -#define STACK_REFPLUS 1 - -#ifdef SLP_EVAL - -/* #define STACK_MAGIC 3 */ -/* the above works fine with gcc 2.96, but 2.95.3 wants this */ -#define STACK_MAGIC 0 - -#if __GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 5) -# define ATTR_NOCLONE __attribute__((noclone)) -#else -# define ATTR_NOCLONE -#endif - -static int -slp_switch(void) -{ - int err; -#ifdef _WIN32 - void *seh; -#endif - void *ebp, *ebx; - unsigned short cw; - int *stackref, stsizediff; - __asm__ volatile ("" : : : "esi", "edi"); - __asm__ volatile ("fstcw %0" : "=m" (cw)); - __asm__ volatile ("movl %%ebp, %0" : "=m" (ebp)); - __asm__ volatile ("movl %%ebx, %0" : "=m" (ebx)); -#ifdef _WIN32 - __asm__ volatile ( - "movl %%fs:0x0, %%eax\n" - "movl %%eax, %0\n" - : "=m" (seh) - : - : "eax"); -#endif - __asm__ ("movl %%esp, %0" : "=g" (stackref)); - { - SLP_SAVE_STATE(stackref, stsizediff); - __asm__ volatile ( - "addl %0, %%esp\n" - "addl %0, %%ebp\n" - : - : "r" (stsizediff) - ); - SLP_RESTORE_STATE(); - __asm__ volatile ("xorl %%eax, %%eax" : "=a" (err)); - } -#ifdef _WIN32 - __asm__ volatile ( - "movl %0, %%eax\n" - "movl %%eax, %%fs:0x0\n" - : - : "m" (seh) - : "eax"); -#endif - __asm__ volatile ("movl %0, %%ebx" : : "m" (ebx)); - __asm__ volatile ("movl %0, %%ebp" : : "m" (ebp)); - __asm__ volatile ("fldcw %0" : : "m" (cw)); - __asm__ volatile ("" : : : "esi", "edi"); - return err; -} - -#endif - -/* - * further self-processing support - */ - -/* - * if you want to add self-inspection tools, place them - * here. See the x86_msvc for the necessary defines. - * These features are highly experimental und not - * essential yet. 
- */
diff --git a/myenv/Lib/site-packages/greenlet/slp_platformselect.h b/myenv/Lib/site-packages/greenlet/slp_platformselect.h
deleted file mode 100644
index 4945648..0000000
--- a/myenv/Lib/site-packages/greenlet/slp_platformselect.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Platform Selection for Stackless Python
- */
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-#if defined(MS_WIN32) && !defined(MS_WIN64) && defined(_M_IX86) && defined(_MSC_VER)
-# include "platform/switch_x86_msvc.h" /* MS Visual Studio on X86 */
-#elif defined(MS_WIN64) && defined(_M_X64) && defined(_MSC_VER) || defined(__MINGW64__)
-# include "platform/switch_x64_msvc.h" /* MS Visual Studio on X64 */
-#elif defined(MS_WIN64) && defined(_M_ARM64)
-# include "platform/switch_arm64_msvc.h" /* MS Visual Studio on ARM64 */
-#elif defined(__GNUC__) && defined(__amd64__) && defined(__ILP32__)
-# include "platform/switch_x32_unix.h" /* gcc on amd64 with x32 ABI */
-#elif defined(__GNUC__) && defined(__amd64__)
-# include "platform/switch_amd64_unix.h" /* gcc on amd64 */
-#elif defined(__GNUC__) && defined(__i386__)
-# include "platform/switch_x86_unix.h" /* gcc on X86 */
-#elif defined(__GNUC__) && defined(__powerpc64__) && (defined(__linux__) || defined(__FreeBSD__))
-# include "platform/switch_ppc64_linux.h" /* gcc on PowerPC 64-bit */
-#elif defined(__GNUC__) && defined(__PPC__) && (defined(__linux__) || defined(__FreeBSD__))
-# include "platform/switch_ppc_linux.h" /* gcc on PowerPC */
-#elif defined(__GNUC__) && defined(__POWERPC__) && defined(__APPLE__)
-# include "platform/switch_ppc_macosx.h" /* Apple MacOS X on 32-bit PowerPC */
-#elif defined(__GNUC__) && defined(__powerpc64__) && defined(_AIX)
-# include "platform/switch_ppc64_aix.h" /* gcc on AIX/PowerPC 64-bit */
-#elif defined(__GNUC__) && defined(_ARCH_PPC) && defined(_AIX)
-# include "platform/switch_ppc_aix.h" /* gcc on AIX/PowerPC */
-#elif defined(__GNUC__) && defined(__powerpc__) && defined(__NetBSD__)
-#include "platform/switch_ppc_unix.h" /* gcc on NetBSD/powerpc */
-#elif defined(__GNUC__) && defined(sparc)
-# include "platform/switch_sparc_sun_gcc.h" /* SunOS sparc with gcc */
-#elif defined(__SUNPRO_C) && defined(sparc) && defined(sun)
-# include "platform/switch_sparc_sun_gcc.h" /* SunStudio on sparc */
-#elif defined(__SUNPRO_C) && defined(__amd64__) && defined(sun)
-# include "platform/switch_amd64_unix.h" /* SunStudio on amd64 */
-#elif defined(__SUNPRO_C) && defined(__i386__) && defined(sun)
-# include "platform/switch_x86_unix.h" /* SunStudio on x86 */
-#elif defined(__GNUC__) && defined(__s390__) && defined(__linux__)
-# include "platform/switch_s390_unix.h" /* Linux/S390 */
-#elif defined(__GNUC__) && defined(__s390x__) && defined(__linux__)
-# include "platform/switch_s390_unix.h" /* Linux/S390 zSeries (64-bit) */
-#elif defined(__GNUC__) && defined(__arm__)
-# ifdef __APPLE__
-# include <TargetConditionals.h>
-# endif
-# if TARGET_OS_IPHONE
-# include "platform/switch_arm32_ios.h" /* iPhone OS on arm32 */
-# else
-# include "platform/switch_arm32_gcc.h" /* gcc using arm32 */
-# endif
-#elif defined(__GNUC__) && defined(__mips__) && defined(__linux__)
-# include "platform/switch_mips_unix.h" /* Linux/MIPS */
-#elif defined(__GNUC__) && defined(__aarch64__)
-# include "platform/switch_aarch64_gcc.h" /* Aarch64 ABI */
-#elif defined(__GNUC__) && defined(__mc68000__)
-# include "platform/switch_m68k_gcc.h" /* gcc on m68k */
-#elif defined(__GNUC__) && defined(__csky__)
-#include "platform/switch_csky_gcc.h" /* gcc on csky */
-# elif defined(__GNUC__) && defined(__riscv)
-# include "platform/switch_riscv_unix.h" /* gcc on RISC-V */ -#elif defined(__GNUC__) && defined(__alpha__) -# include "platform/switch_alpha_unix.h" /* gcc on DEC Alpha */ -#elif defined(MS_WIN32) && defined(__llvm__) && defined(__aarch64__) -# include "platform/switch_aarch64_gcc.h" /* LLVM Aarch64 ABI for Windows */ -#elif defined(__GNUC__) && defined(__loongarch64) && defined(__linux__) -# include "platform/switch_loongarch64_linux.h" /* LoongArch64 */ -#elif defined(__GNUC__) && defined(__sh__) -# include "platform/switch_sh_gcc.h" /* SuperH */ -#endif - -#ifdef __cplusplus -}; -#endif diff --git a/myenv/Lib/site-packages/greenlet/tests/__init__.py b/myenv/Lib/site-packages/greenlet/tests/__init__.py deleted file mode 100644 index e69392e..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/__init__.py +++ /dev/null @@ -1,240 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Tests for greenlet. - -""" -import os -import sys -import unittest - -from gc import collect -from gc import get_objects -from threading import active_count as active_thread_count -from time import sleep -from time import time - -import psutil - -from greenlet import greenlet as RawGreenlet -from greenlet import getcurrent - -from greenlet._greenlet import get_pending_cleanup_count -from greenlet._greenlet import get_total_main_greenlets - -from . import leakcheck - -PY312 = sys.version_info[:2] >= (3, 12) -PY313 = sys.version_info[:2] >= (3, 13) - -WIN = sys.platform.startswith("win") -RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') -RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS -RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') -RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR -RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') - -class TestCaseMetaClass(type): - # wrap each test method with - # a) leak checks - def __new__(cls, classname, bases, classDict): - # pylint and pep8 fight over what this should be called (mcs or cls). - # pylint gets it right, but we can't scope disable pep8, so we go with - # its convention. - # pylint: disable=bad-mcs-classmethod-argument - check_totalrefcount = True - - # Python 3: must copy, we mutate the classDict. Interestingly enough, - # it doesn't actually error out, but under 3.6 we wind up wrapping - # and re-wrapping the same items over and over and over. - for key, value in list(classDict.items()): - if key.startswith('test') and callable(value): - classDict.pop(key) - if check_totalrefcount: - value = leakcheck.wrap_refcount(value) - classDict[key] = value - return type.__new__(cls, classname, bases, classDict) - - -class TestCase(TestCaseMetaClass( - "NewBase", - (unittest.TestCase,), - {})): - - cleanup_attempt_sleep_duration = 0.001 - cleanup_max_sleep_seconds = 1 - - def wait_for_pending_cleanups(self, - initial_active_threads=None, - initial_main_greenlets=None): - initial_active_threads = initial_active_threads or self.threads_before_test - initial_main_greenlets = initial_main_greenlets or self.main_greenlets_before_test - sleep_time = self.cleanup_attempt_sleep_duration - # NOTE: This is racy! A Python-level thread object may be dead - # and gone, but the C thread may not yet have fired its - # destructors and added to the queue. There's no particular - # way to know that's about to happen. We try to watch the - # Python threads to make sure they, at least, have gone away. - # Counting the main greenlets, which we can easily do deterministically, - # also helps. 
- - # Always sleep at least once to let other threads run - sleep(sleep_time) - quit_after = time() + self.cleanup_max_sleep_seconds - # TODO: We could add an API that calls us back when a particular main greenlet is deleted? - # It would have to drop the GIL - while ( - get_pending_cleanup_count() - or active_thread_count() > initial_active_threads - or (not self.expect_greenlet_leak - and get_total_main_greenlets() > initial_main_greenlets)): - sleep(sleep_time) - if time() > quit_after: - print("Time limit exceeded.") - print("Threads: Waiting for only", initial_active_threads, - "-->", active_thread_count()) - print("MGlets : Waiting for only", initial_main_greenlets, - "-->", get_total_main_greenlets()) - break - collect() - - def count_objects(self, kind=list, exact_kind=True): - # pylint:disable=unidiomatic-typecheck - # Collect the garbage. - for _ in range(3): - collect() - if exact_kind: - return sum( - 1 - for x in get_objects() - if type(x) is kind - ) - # instances - return sum( - 1 - for x in get_objects() - if isinstance(x, kind) - ) - - greenlets_before_test = 0 - threads_before_test = 0 - main_greenlets_before_test = 0 - expect_greenlet_leak = False - - def count_greenlets(self): - """ - Find all the greenlets and subclasses tracked by the GC. - """ - return self.count_objects(RawGreenlet, False) - - def setUp(self): - # Ensure the main greenlet exists, otherwise the first test - # gets a false positive leak - super().setUp() - getcurrent() - self.threads_before_test = active_thread_count() - self.main_greenlets_before_test = get_total_main_greenlets() - self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) - self.greenlets_before_test = self.count_greenlets() - - def tearDown(self): - if getattr(self, 'skipTearDown', False): - return - - self.wait_for_pending_cleanups(self.threads_before_test, self.main_greenlets_before_test) - super().tearDown() - - def get_expected_returncodes_for_aborted_process(self): - import signal - # The child should be aborted in an unusual way. On POSIX - # platforms, this is done with abort() and signal.SIGABRT, - # which is reflected in a negative return value; however, on - # Windows, even though we observe the child print "Fatal - # Python error: Aborted" and in older versions of the C - # runtime "This application has requested the Runtime to - # terminate it in an unusual way," it always has an exit code - # of 3. This is interesting because 3 is the error code for - # ERROR_PATH_NOT_FOUND; BUT: the C runtime abort() function - # also uses this code. - # - # If we link to the static C library on Windows, the error - # code changes to '0xc0000409' (hex(3221226505)), which - # apparently is STATUS_STACK_BUFFER_OVERRUN; but "What this - # means is that nowadays when you get a - # STATUS_STACK_BUFFER_OVERRUN, it doesn’t actually mean that - # there is a stack buffer overrun. It just means that the - # application decided to terminate itself with great haste." - # - # - # On windows, we've also seen '0xc0000005' (hex(3221225477)). 
- # That's "Access Violation" - # - # See - # https://devblogs.microsoft.com/oldnewthing/20110519-00/?p=10623 - # and - # https://docs.microsoft.com/en-us/previous-versions/k089yyh0(v=vs.140)?redirectedfrom=MSDN - # and - # https://devblogs.microsoft.com/oldnewthing/20190108-00/?p=100655 - expected_exit = ( - -signal.SIGABRT, - # But beginning on Python 3.11, the faulthandler - # that prints the C backtraces sometimes segfaults after - # reporting the exception but before printing the stack. - # This has only been seen on linux/gcc. - -signal.SIGSEGV, - ) if not WIN else ( - 3, - 0xc0000409, - 0xc0000005, - ) - return expected_exit - - def get_process_uss(self): - """ - Return the current process's USS in bytes. - - uss is available on Linux, macOS, Windows. Also known as - "Unique Set Size", this is the memory which is unique to a - process and which would be freed if the process was terminated - right now. - - If this is not supported by ``psutil``, this raises the - :exc:`unittest.SkipTest` exception. - """ - try: - return psutil.Process().memory_full_info().uss - except AttributeError as e: - raise unittest.SkipTest("uss not supported") from e - - def run_script(self, script_name, show_output=True): - import subprocess - script = os.path.join( - os.path.dirname(__file__), - script_name, - ) - - try: - return subprocess.check_output([sys.executable, script], - encoding='utf-8', - stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as ex: - if show_output: - print('-----') - print('Failed to run script', script) - print('~~~~~') - print(ex.output) - print('------') - raise - - - def assertScriptRaises(self, script_name, exitcodes=None): - import subprocess - with self.assertRaises(subprocess.CalledProcessError) as exc: - output = self.run_script(script_name, show_output=False) - __traceback_info__ = output - # We're going to fail the assertion if we get here, at least - # preserve the output in the traceback. - - if exitcodes is None: - exitcodes = self.get_expected_returncodes_for_aborted_process() - self.assertIn(exc.exception.returncode, exitcodes) - return exc.exception diff --git a/myenv/Lib/site-packages/greenlet/tests/_test_extension.c b/myenv/Lib/site-packages/greenlet/tests/_test_extension.c deleted file mode 100644 index 05e81c0..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/_test_extension.c +++ /dev/null @@ -1,231 +0,0 @@ -/* This is a set of functions used by test_extension_interface.py to test the - * Greenlet C API. 
- */ - -#include "../greenlet.h" - -#ifndef Py_RETURN_NONE -# define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None -#endif - -#define TEST_MODULE_NAME "_test_extension" - -static PyObject* -test_switch(PyObject* self, PyObject* greenlet) -{ - PyObject* result = NULL; - - if (greenlet == NULL || !PyGreenlet_Check(greenlet)) { - PyErr_BadArgument(); - return NULL; - } - - result = PyGreenlet_Switch((PyGreenlet*)greenlet, NULL, NULL); - if (result == NULL) { - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_AssertionError, - "greenlet.switch() failed for some reason."); - } - return NULL; - } - Py_INCREF(result); - return result; -} - -static PyObject* -test_switch_kwargs(PyObject* self, PyObject* args, PyObject* kwargs) -{ - PyGreenlet* g = NULL; - PyObject* result = NULL; - - PyArg_ParseTuple(args, "O!", &PyGreenlet_Type, &g); - - if (g == NULL || !PyGreenlet_Check(g)) { - PyErr_BadArgument(); - return NULL; - } - - result = PyGreenlet_Switch(g, NULL, kwargs); - if (result == NULL) { - if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_AssertionError, - "greenlet.switch() failed for some reason."); - } - return NULL; - } - Py_XINCREF(result); - return result; -} - -static PyObject* -test_getcurrent(PyObject* self) -{ - PyGreenlet* g = PyGreenlet_GetCurrent(); - if (g == NULL || !PyGreenlet_Check(g) || !PyGreenlet_ACTIVE(g)) { - PyErr_SetString(PyExc_AssertionError, - "getcurrent() returned an invalid greenlet"); - Py_XDECREF(g); - return NULL; - } - Py_DECREF(g); - Py_RETURN_NONE; -} - -static PyObject* -test_setparent(PyObject* self, PyObject* arg) -{ - PyGreenlet* current; - PyGreenlet* greenlet = NULL; - - if (arg == NULL || !PyGreenlet_Check(arg)) { - PyErr_BadArgument(); - return NULL; - } - if ((current = PyGreenlet_GetCurrent()) == NULL) { - return NULL; - } - greenlet = (PyGreenlet*)arg; - if (PyGreenlet_SetParent(greenlet, current)) { - Py_DECREF(current); - return NULL; - } - Py_DECREF(current); - if (PyGreenlet_Switch(greenlet, NULL, NULL) == NULL) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject* -test_new_greenlet(PyObject* self, PyObject* callable) -{ - PyObject* result = NULL; - PyGreenlet* greenlet = PyGreenlet_New(callable, NULL); - - if (!greenlet) { - return NULL; - } - - result = PyGreenlet_Switch(greenlet, NULL, NULL); - Py_CLEAR(greenlet); - if (result == NULL) { - return NULL; - } - - Py_INCREF(result); - return result; -} - -static PyObject* -test_raise_dead_greenlet(PyObject* self) -{ - PyErr_SetString(PyExc_GreenletExit, "test GreenletExit exception."); - return NULL; -} - -static PyObject* -test_raise_greenlet_error(PyObject* self) -{ - PyErr_SetString(PyExc_GreenletError, "test greenlet.error exception"); - return NULL; -} - -static PyObject* -test_throw(PyObject* self, PyGreenlet* g) -{ - const char msg[] = "take that sucka!"; - PyObject* msg_obj = Py_BuildValue("s", msg); - PyGreenlet_Throw(g, PyExc_ValueError, msg_obj, NULL); - Py_DECREF(msg_obj); - if (PyErr_Occurred()) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyObject* -test_throw_exact(PyObject* self, PyObject* args) -{ - PyGreenlet* g = NULL; - PyObject* typ = NULL; - PyObject* val = NULL; - PyObject* tb = NULL; - - if (!PyArg_ParseTuple(args, "OOOO:throw", &g, &typ, &val, &tb)) { - return NULL; - } - - PyGreenlet_Throw(g, typ, val, tb); - if (PyErr_Occurred()) { - return NULL; - } - Py_RETURN_NONE; -} - -static PyMethodDef test_methods[] = { - {"test_switch", - (PyCFunction)test_switch, - METH_O, - "Switch to the provided greenlet sending provided arguments, and \n" - "return the 
results."}, - {"test_switch_kwargs", - (PyCFunction)test_switch_kwargs, - METH_VARARGS | METH_KEYWORDS, - "Switch to the provided greenlet sending the provided keyword args."}, - {"test_getcurrent", - (PyCFunction)test_getcurrent, - METH_NOARGS, - "Test PyGreenlet_GetCurrent()"}, - {"test_setparent", - (PyCFunction)test_setparent, - METH_O, - "Se the parent of the provided greenlet and switch to it."}, - {"test_new_greenlet", - (PyCFunction)test_new_greenlet, - METH_O, - "Test PyGreenlet_New()"}, - {"test_raise_dead_greenlet", - (PyCFunction)test_raise_dead_greenlet, - METH_NOARGS, - "Just raise greenlet.GreenletExit"}, - {"test_raise_greenlet_error", - (PyCFunction)test_raise_greenlet_error, - METH_NOARGS, - "Just raise greenlet.error"}, - {"test_throw", - (PyCFunction)test_throw, - METH_O, - "Throw a ValueError at the provided greenlet"}, - {"test_throw_exact", - (PyCFunction)test_throw_exact, - METH_VARARGS, - "Throw exactly the arguments given at the provided greenlet"}, - {NULL, NULL, 0, NULL} -}; - - -#define INITERROR return NULL - -static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, - TEST_MODULE_NAME, - NULL, - 0, - test_methods, - NULL, - NULL, - NULL, - NULL}; - -PyMODINIT_FUNC -PyInit__test_extension(void) -{ - PyObject* module = NULL; - module = PyModule_Create(&moduledef); - - if (module == NULL) { - return NULL; - } - - PyGreenlet_Import(); - return module; -} diff --git a/myenv/Lib/site-packages/greenlet/tests/_test_extension.cp312-win_amd64.pyd b/myenv/Lib/site-packages/greenlet/tests/_test_extension.cp312-win_amd64.pyd deleted file mode 100644 index a93796d..0000000 Binary files a/myenv/Lib/site-packages/greenlet/tests/_test_extension.cp312-win_amd64.pyd and /dev/null differ diff --git a/myenv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cp312-win_amd64.pyd b/myenv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cp312-win_amd64.pyd deleted file mode 100644 index f80b6df..0000000 Binary files a/myenv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cp312-win_amd64.pyd and /dev/null differ diff --git a/myenv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cpp b/myenv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cpp deleted file mode 100644 index 5cbe6a7..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/_test_extension_cpp.cpp +++ /dev/null @@ -1,226 +0,0 @@ -/* This is a set of functions used to test C++ exceptions are not - * broken during greenlet switches - */ - -#include "../greenlet.h" -#include "../greenlet_compiler_compat.hpp" -#include -#include - -struct exception_t { - int depth; - exception_t(int depth) : depth(depth) {} -}; - -/* Functions are called via pointers to prevent inlining */ -static void (*p_test_exception_throw_nonstd)(int depth); -static void (*p_test_exception_throw_std)(); -static PyObject* (*p_test_exception_switch_recurse)(int depth, int left); - -static void -test_exception_throw_nonstd(int depth) -{ - throw exception_t(depth); -} - -static void -test_exception_throw_std() -{ - throw std::runtime_error("Thrown from an extension."); -} - -static PyObject* -test_exception_switch_recurse(int depth, int left) -{ - if (left > 0) { - return p_test_exception_switch_recurse(depth, left - 1); - } - - PyObject* result = NULL; - PyGreenlet* self = PyGreenlet_GetCurrent(); - if (self == NULL) - return NULL; - - try { - if (PyGreenlet_Switch(PyGreenlet_GET_PARENT(self), NULL, NULL) == NULL) { - Py_DECREF(self); - return NULL; - } - p_test_exception_throw_nonstd(depth); - PyErr_SetString(PyExc_RuntimeError, - 
"throwing C++ exception didn't work"); - } - catch (const exception_t& e) { - if (e.depth != depth) - PyErr_SetString(PyExc_AssertionError, "depth mismatch"); - else - result = PyLong_FromLong(depth); - } - catch (...) { - PyErr_SetString(PyExc_RuntimeError, "unexpected C++ exception"); - } - - Py_DECREF(self); - return result; -} - -/* test_exception_switch(int depth) - * - recurses depth times - * - switches to parent inside try/catch block - * - throws an exception that (expected to be caught in the same function) - * - verifies depth matches (exceptions shouldn't be caught in other greenlets) - */ -static PyObject* -test_exception_switch(PyObject* UNUSED(self), PyObject* args) -{ - int depth; - if (!PyArg_ParseTuple(args, "i", &depth)) - return NULL; - return p_test_exception_switch_recurse(depth, depth); -} - - -static PyObject* -py_test_exception_throw_nonstd(PyObject* self, PyObject* args) -{ - if (!PyArg_ParseTuple(args, "")) - return NULL; - p_test_exception_throw_nonstd(0); - PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); - return NULL; -} - -static PyObject* -py_test_exception_throw_std(PyObject* self, PyObject* args) -{ - if (!PyArg_ParseTuple(args, "")) - return NULL; - p_test_exception_throw_std(); - PyErr_SetString(PyExc_AssertionError, "unreachable code running after throw"); - return NULL; -} - -static PyObject* -py_test_call(PyObject* self, PyObject* arg) -{ - PyObject* noargs = PyTuple_New(0); - PyObject* ret = PyObject_Call(arg, noargs, nullptr); - Py_DECREF(noargs); - return ret; -} - - - -/* test_exception_switch_and_do_in_g2(g2func) - * - creates new greenlet g2 to run g2func - * - switches to g2 inside try/catch block - * - verifies that no exception has been caught - * - * it is used together with test_exception_throw to verify that unhandled - * exceptions thrown in one greenlet do not propagate to other greenlet nor - * segfault the process. - */ -static PyObject* -test_exception_switch_and_do_in_g2(PyObject* self, PyObject* args) -{ - PyObject* g2func = NULL; - PyObject* result = NULL; - - if (!PyArg_ParseTuple(args, "O", &g2func)) - return NULL; - PyGreenlet* g2 = PyGreenlet_New(g2func, NULL); - if (!g2) { - return NULL; - } - - try { - result = PyGreenlet_Switch(g2, NULL, NULL); - if (!result) { - return NULL; - } - } - catch (const exception_t& e) { - /* if we are here the memory can be already corrupted and the program - * might crash before below py-level exception might become printed. - * -> print something to stderr to make it clear that we had entered - * this catch block. - * See comments in inner_bootstrap() - */ -#if defined(WIN32) || defined(_WIN32) - fprintf(stderr, "C++ exception unexpectedly caught in g1\n"); - PyErr_SetString(PyExc_AssertionError, "C++ exception unexpectedly caught in g1"); - Py_XDECREF(result); - return NULL; -#else - throw; -#endif - } - - Py_XDECREF(result); - Py_RETURN_NONE; -} - -static PyMethodDef test_methods[] = { - {"test_exception_switch", - (PyCFunction)&test_exception_switch, - METH_VARARGS, - "Switches to parent twice, to test exception handling and greenlet " - "switching."}, - {"test_exception_switch_and_do_in_g2", - (PyCFunction)&test_exception_switch_and_do_in_g2, - METH_VARARGS, - "Creates new greenlet g2 to run g2func and switches to it inside try/catch " - "block. 
Used together with test_exception_throw to verify that unhandled " - "C++ exceptions thrown in a greenlet doe not corrupt memory."}, - {"test_exception_throw_nonstd", - (PyCFunction)&py_test_exception_throw_nonstd, - METH_VARARGS, - "Throws non-standard C++ exception. Calling this function directly should abort the process." - }, - {"test_exception_throw_std", - (PyCFunction)&py_test_exception_throw_std, - METH_VARARGS, - "Throws standard C++ exception. Calling this function directly should abort the process." - }, - {"test_call", - (PyCFunction)&py_test_call, - METH_O, - "Call the given callable. Unlike calling it directly, this creates a " - "new C-level stack frame, which may be helpful in testing." - }, - {NULL, NULL, 0, NULL} -}; - - -static struct PyModuleDef moduledef = {PyModuleDef_HEAD_INIT, - "greenlet.tests._test_extension_cpp", - NULL, - 0, - test_methods, - NULL, - NULL, - NULL, - NULL}; - -PyMODINIT_FUNC -PyInit__test_extension_cpp(void) -{ - PyObject* module = NULL; - - module = PyModule_Create(&moduledef); - - if (module == NULL) { - return NULL; - } - - PyGreenlet_Import(); - if (_PyGreenlet_API == NULL) { - return NULL; - } - - p_test_exception_throw_nonstd = test_exception_throw_nonstd; - p_test_exception_throw_std = test_exception_throw_std; - p_test_exception_switch_recurse = test_exception_switch_recurse; - - return module; -} diff --git a/myenv/Lib/site-packages/greenlet/tests/fail_clearing_run_switches.py b/myenv/Lib/site-packages/greenlet/tests/fail_clearing_run_switches.py deleted file mode 100644 index 6dd1492..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/fail_clearing_run_switches.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -""" -If we have a run callable passed to the constructor or set as an -attribute, but we don't actually use that (because ``__getattribute__`` -or the like interferes), then when we clear callable before beginning -to run, there's an opportunity for Python code to run. - -""" -import greenlet - -g = None -main = greenlet.getcurrent() - -results = [] - -class RunCallable: - - def __del__(self): - results.append(('RunCallable', '__del__')) - main.switch('from RunCallable') - - -class G(greenlet.greenlet): - - def __getattribute__(self, name): - if name == 'run': - results.append(('G.__getattribute__', 'run')) - return run_func - return object.__getattribute__(self, name) - - -def run_func(): - results.append(('run_func', 'enter')) - - -g = G(RunCallable()) -# Try to start G. It will get to the point where it deletes -# its run callable C++ variable in inner_bootstrap. That triggers -# the __del__ method, which switches back to main before g -# actually even starts running. -x = g.switch() -results.append(('main: g.switch()', x)) -# In the C++ code, this results in g->g_switch() appearing to return, even though -# it has yet to run. -print('In main with', x, flush=True) -g.switch() -print('RESULTS', results) diff --git a/myenv/Lib/site-packages/greenlet/tests/fail_cpp_exception.py b/myenv/Lib/site-packages/greenlet/tests/fail_cpp_exception.py deleted file mode 100644 index fa4dc2e..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/fail_cpp_exception.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Helper for testing a C++ exception throw aborts the process. - -Takes one argument, the name of the function in :mod:`_test_extension_cpp` to call. 
-""" -import sys -import greenlet -from greenlet.tests import _test_extension_cpp -print('fail_cpp_exception is running') - -def run_unhandled_exception_in_greenlet_aborts(): - def _(): - _test_extension_cpp.test_exception_switch_and_do_in_g2( - _test_extension_cpp.test_exception_throw_nonstd - ) - g1 = greenlet.greenlet(_) - g1.switch() - - -func_name = sys.argv[1] -try: - func = getattr(_test_extension_cpp, func_name) -except AttributeError: - if func_name == run_unhandled_exception_in_greenlet_aborts.__name__: - func = run_unhandled_exception_in_greenlet_aborts - elif func_name == 'run_as_greenlet_target': - g = greenlet.greenlet(_test_extension_cpp.test_exception_throw_std) - func = g.switch - else: - raise -print('raising', func, flush=True) -func() diff --git a/myenv/Lib/site-packages/greenlet/tests/fail_initialstub_already_started.py b/myenv/Lib/site-packages/greenlet/tests/fail_initialstub_already_started.py deleted file mode 100644 index c1a44ef..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/fail_initialstub_already_started.py +++ /dev/null @@ -1,78 +0,0 @@ -""" -Testing initialstub throwing an already started exception. -""" - -import greenlet - -a = None -b = None -c = None -main = greenlet.getcurrent() - -# If we switch into a dead greenlet, -# we go looking for its parents. -# if a parent is not yet started, we start it. - -results = [] - -def a_run(*args): - #results.append('A') - results.append(('Begin A', args)) - - -def c_run(): - results.append('Begin C') - b.switch('From C') - results.append('C done') - -class A(greenlet.greenlet): pass - -class B(greenlet.greenlet): - doing_it = False - def __getattribute__(self, name): - if name == 'run' and not self.doing_it: - assert greenlet.getcurrent() is c - self.doing_it = True - results.append('Switch to b from B.__getattribute__ in ' - + type(greenlet.getcurrent()).__name__) - b.switch() - results.append('B.__getattribute__ back from main in ' - + type(greenlet.getcurrent()).__name__) - if name == 'run': - name = '_B_run' - return object.__getattribute__(self, name) - - def _B_run(self, *arg): - results.append(('Begin B', arg)) - results.append('_B_run switching to main') - main.switch('From B') - -class C(greenlet.greenlet): - pass -a = A(a_run) -b = B(parent=a) -c = C(c_run, b) - -# Start a child; while running, it will start B, -# but starting B will ALSO start B. -result = c.switch() -results.append(('main from c', result)) - -# Switch back to C, which was in the middle of switching -# already. This will throw the ``GreenletStartedWhileInPython`` -# exception, which results in parent A getting started (B is finished) -c.switch() - -results.append(('A dead?', a.dead, 'B dead?', b.dead, 'C dead?', c.dead)) - -# A and B should both be dead now. -assert a.dead -assert b.dead -assert not c.dead - -result = c.switch() -results.append(('main from c.2', result)) -# Now C is dead -assert c.dead - -print("RESULTS:", results) diff --git a/myenv/Lib/site-packages/greenlet/tests/fail_slp_switch.py b/myenv/Lib/site-packages/greenlet/tests/fail_slp_switch.py deleted file mode 100644 index 0990526..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/fail_slp_switch.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -""" -A test helper for seeing what happens when slp_switch() -fails. 
-""" -# pragma: no cover - -import greenlet - - -print('fail_slp_switch is running', flush=True) - -runs = [] -def func(): - runs.append(1) - greenlet.getcurrent().parent.switch() - runs.append(2) - greenlet.getcurrent().parent.switch() - runs.append(3) - -g = greenlet._greenlet.UnswitchableGreenlet(func) -g.switch() -assert runs == [1] -g.switch() -assert runs == [1, 2] -g.force_slp_switch_error = True - -# This should crash. -g.switch() diff --git a/myenv/Lib/site-packages/greenlet/tests/fail_switch_three_greenlets.py b/myenv/Lib/site-packages/greenlet/tests/fail_switch_three_greenlets.py deleted file mode 100644 index e151b19..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/fail_switch_three_greenlets.py +++ /dev/null @@ -1,44 +0,0 @@ -""" -Uses a trace function to switch greenlets at unexpected times. - -In the trace function, we switch from the current greenlet to another -greenlet, which switches -""" -import greenlet - -g1 = None -g2 = None - -switch_to_g2 = False - -def tracefunc(*args): - print('TRACE', *args) - global switch_to_g2 - if switch_to_g2: - switch_to_g2 = False - g2.switch() - print('\tLEAVE TRACE', *args) - -def g1_run(): - print('In g1_run') - global switch_to_g2 - switch_to_g2 = True - from_parent = greenlet.getcurrent().parent.switch() - print('Return to g1_run') - print('From parent', from_parent) - -def g2_run(): - #g1.switch() - greenlet.getcurrent().parent.switch() - -greenlet.settrace(tracefunc) - -g1 = greenlet.greenlet(g1_run) -g2 = greenlet.greenlet(g2_run) - -# This switch didn't actually finish! -# And if it did, it would raise TypeError -# because g1_run() doesn't take any arguments. -g1.switch(1) -print('Back in main') -g1.switch(2) diff --git a/myenv/Lib/site-packages/greenlet/tests/fail_switch_three_greenlets2.py b/myenv/Lib/site-packages/greenlet/tests/fail_switch_three_greenlets2.py deleted file mode 100644 index 1f6b66b..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/fail_switch_three_greenlets2.py +++ /dev/null @@ -1,55 +0,0 @@ -""" -Like fail_switch_three_greenlets, but the call into g1_run would actually be -valid. 
-""" -import greenlet - -g1 = None -g2 = None - -switch_to_g2 = True - -results = [] - -def tracefunc(*args): - results.append(('trace', args[0])) - print('TRACE', *args) - global switch_to_g2 - if switch_to_g2: - switch_to_g2 = False - g2.switch('g2 from tracefunc') - print('\tLEAVE TRACE', *args) - -def g1_run(arg): - results.append(('g1 arg', arg)) - print('In g1_run') - from_parent = greenlet.getcurrent().parent.switch('from g1_run') - results.append(('g1 from parent', from_parent)) - return 'g1 done' - -def g2_run(arg): - #g1.switch() - results.append(('g2 arg', arg)) - parent = greenlet.getcurrent().parent.switch('from g2_run') - global switch_to_g2 - switch_to_g2 = False - results.append(('g2 from parent', parent)) - return 'g2 done' - - -greenlet.settrace(tracefunc) - -g1 = greenlet.greenlet(g1_run) -g2 = greenlet.greenlet(g2_run) - -x = g1.switch('g1 from main') -results.append(('main g1', x)) -print('Back in main', x) -x = g1.switch('g2 from main') -results.append(('main g2', x)) -print('back in amain again', x) -x = g1.switch('g1 from main 2') -results.append(('main g1.2', x)) -x = g2.switch() -results.append(('main g2.2', x)) -print("RESULTS:", results) diff --git a/myenv/Lib/site-packages/greenlet/tests/fail_switch_two_greenlets.py b/myenv/Lib/site-packages/greenlet/tests/fail_switch_two_greenlets.py deleted file mode 100644 index 3e52345..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/fail_switch_two_greenlets.py +++ /dev/null @@ -1,41 +0,0 @@ -""" -Uses a trace function to switch greenlets at unexpected times. - -In the trace function, we switch from the current greenlet to another -greenlet, which switches -""" -import greenlet - -g1 = None -g2 = None - -switch_to_g2 = False - -def tracefunc(*args): - print('TRACE', *args) - global switch_to_g2 - if switch_to_g2: - switch_to_g2 = False - g2.switch() - print('\tLEAVE TRACE', *args) - -def g1_run(): - print('In g1_run') - global switch_to_g2 - switch_to_g2 = True - greenlet.getcurrent().parent.switch() - print('Return to g1_run') - print('Falling off end of g1_run') - -def g2_run(): - g1.switch() - print('Falling off end of g2') - -greenlet.settrace(tracefunc) - -g1 = greenlet.greenlet(g1_run) -g2 = greenlet.greenlet(g2_run) - -g1.switch() -print('Falling off end of main') -g2.switch() diff --git a/myenv/Lib/site-packages/greenlet/tests/leakcheck.py b/myenv/Lib/site-packages/greenlet/tests/leakcheck.py deleted file mode 100644 index a5152fb..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/leakcheck.py +++ /dev/null @@ -1,319 +0,0 @@ -# Copyright (c) 2018 gevent community -# Copyright (c) 2021 greenlet community -# -# This was originally part of gevent's test suite. The main author -# (Jason Madden) vendored a copy of it into greenlet. -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in -# all copies or substantial portions of the Software. 
-# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -# THE SOFTWARE. -from __future__ import print_function - -import os -import sys -import gc - -from functools import wraps -import unittest - - -import objgraph - -# graphviz 0.18 (Nov 7 2021), available only on Python 3.6 and newer, -# has added type hints (sigh). It wants to use ``typing.Literal`` for -# some stuff, but that's only available on Python 3.9+. If that's not -# found, it creates a ``unittest.mock.MagicMock`` object and annotates -# with that. These are GC'able objects, and doing almost *anything* -# with them results in an explosion of objects. For example, trying to -# compare them for equality creates new objects. This causes our -# leakchecks to fail, with reports like: -# -# greenlet.tests.leakcheck.LeakCheckError: refcount increased by [337, 1333, 343, 430, 530, 643, 769] -# _Call 1820 +546 -# dict 4094 +76 -# MagicProxy 585 +73 -# tuple 2693 +66 -# _CallList 24 +3 -# weakref 1441 +1 -# function 5996 +1 -# type 736 +1 -# cell 592 +1 -# MagicMock 8 +1 -# -# To avoid this, we *could* filter this type of object out early. In -# principle it could leak, but we don't use mocks in greenlet, so it -# doesn't leak from us. However, a further issue is that ``MagicMock`` -# objects have subobjects that are also GC'able, like ``_Call``, and -# those create new mocks of their own too. So we'd have to filter them -# as well, and they're not public. That's OK, we can workaround the -# problem by being very careful to never compare by equality or other -# user-defined operators, only using object identity or other builtin -# functions. - -RUNNING_ON_GITHUB_ACTIONS = os.environ.get('GITHUB_ACTIONS') -RUNNING_ON_TRAVIS = os.environ.get('TRAVIS') or RUNNING_ON_GITHUB_ACTIONS -RUNNING_ON_APPVEYOR = os.environ.get('APPVEYOR') -RUNNING_ON_CI = RUNNING_ON_TRAVIS or RUNNING_ON_APPVEYOR -RUNNING_ON_MANYLINUX = os.environ.get('GREENLET_MANYLINUX') -SKIP_LEAKCHECKS = RUNNING_ON_MANYLINUX or os.environ.get('GREENLET_SKIP_LEAKCHECKS') -SKIP_FAILING_LEAKCHECKS = os.environ.get('GREENLET_SKIP_FAILING_LEAKCHECKS') -ONLY_FAILING_LEAKCHECKS = os.environ.get('GREENLET_ONLY_FAILING_LEAKCHECKS') - -def ignores_leakcheck(func): - """ - Ignore the given object during leakchecks. - - Can be applied to a method, in which case the method will run, but - will not be subject to leak checks. - - If applied to a class, the entire class will be skipped during leakchecks. This - is intended to be used for classes that are very slow and cause problems such as - test timeouts; typically it will be used for classes that are subclasses of a base - class and specify variants of behaviour (such as pool sizes). - """ - func.ignore_leakcheck = True - return func - -def fails_leakcheck(func): - """ - Mark that the function is known to leak. 
- """ - func.fails_leakcheck = True - if SKIP_FAILING_LEAKCHECKS: - func = unittest.skip("Skipping known failures")(func) - return func - -class LeakCheckError(AssertionError): - pass - -if hasattr(sys, 'getobjects'): - # In a Python build with ``--with-trace-refs``, make objgraph - # trace *all* the objects, not just those that are tracked by the - # GC - class _MockGC(object): - def get_objects(self): - return sys.getobjects(0) # pylint:disable=no-member - def __getattr__(self, name): - return getattr(gc, name) - objgraph.gc = _MockGC() - fails_strict_leakcheck = fails_leakcheck -else: - def fails_strict_leakcheck(func): - """ - Decorator for a function that is known to fail when running - strict (``sys.getobjects()``) leakchecks. - - This type of leakcheck finds all objects, even those, such as - strings, which are not tracked by the garbage collector. - """ - return func - -class ignores_types_in_strict_leakcheck(object): - def __init__(self, types): - self.types = types - def __call__(self, func): - func.leakcheck_ignore_types = self.types - return func - -class _RefCountChecker(object): - - # Some builtin things that we ignore - # XXX: Those things were ignored by gevent, but they're important here, - # presumably. - IGNORED_TYPES = () #(tuple, dict, types.FrameType, types.TracebackType) - - def __init__(self, testcase, function): - self.testcase = testcase - self.function = function - self.deltas = [] - self.peak_stats = {} - self.ignored_types = () - - # The very first time we are called, we have already been - # self.setUp() by the test runner, so we don't need to do it again. - self.needs_setUp = False - - def _include_object_p(self, obj): - # pylint:disable=too-many-return-statements - # - # See the comment block at the top. We must be careful to - # avoid invoking user-defined operations. - if obj is self: - return False - kind = type(obj) - # ``self._include_object_p == obj`` returns NotImplemented - # for non-function objects, which causes the interpreter - # to try to reverse the order of arguments...which leads - # to the explosion of mock objects. We don't want that, so we implement - # the check manually. - if kind == type(self._include_object_p): - try: - # pylint:disable=not-callable - exact_method_equals = self._include_object_p.__eq__(obj) - except AttributeError: - # Python 2.7 methods may only have __cmp__, and that raises a - # TypeError for non-method arguments - # pylint:disable=no-member - exact_method_equals = self._include_object_p.__cmp__(obj) == 0 - - if exact_method_equals is not NotImplemented and exact_method_equals: - return False - - # Similarly, we need to check identity in our __dict__ to avoid mock explosions. 
- for x in self.__dict__.values(): - if obj is x: - return False - - - if kind in self.ignored_types or kind in self.IGNORED_TYPES: - return False - - return True - - def _growth(self): - return objgraph.growth(limit=None, peak_stats=self.peak_stats, - filter=self._include_object_p) - - def _report_diff(self, growth): - if not growth: - return "" - - lines = [] - width = max(len(name) for name, _, _ in growth) - for name, count, delta in growth: - lines.append('%-*s%9d %+9d' % (width, name, count, delta)) - - diff = '\n'.join(lines) - return diff - - - def _run_test(self, args, kwargs): - gc_enabled = gc.isenabled() - gc.disable() - - if self.needs_setUp: - self.testcase.setUp() - self.testcase.skipTearDown = False - try: - self.function(self.testcase, *args, **kwargs) - finally: - self.testcase.tearDown() - self.testcase.doCleanups() - self.testcase.skipTearDown = True - self.needs_setUp = True - if gc_enabled: - gc.enable() - - def _growth_after(self): - # Grab post snapshot - # pylint:disable=no-member - if 'urlparse' in sys.modules: - sys.modules['urlparse'].clear_cache() - if 'urllib.parse' in sys.modules: - sys.modules['urllib.parse'].clear_cache() - - return self._growth() - - def _check_deltas(self, growth): - # Return false when we have decided there is no leak, - # true if we should keep looping, raises an assertion - # if we have decided there is a leak. - - deltas = self.deltas - if not deltas: - # We haven't run yet, no data, keep looping - return True - - if gc.garbage: - raise LeakCheckError("Generated uncollectable garbage %r" % (gc.garbage,)) - - - # the following configurations are classified as "no leak" - # [0, 0] - # [x, 0, 0] - # [... a, b, c, d] where a+b+c+d = 0 - # - # the following configurations are classified as "leak" - # [... z, z, z] where z > 0 - - if deltas[-2:] == [0, 0] and len(deltas) in (2, 3): - return False - - if deltas[-3:] == [0, 0, 0]: - return False - - if len(deltas) >= 4 and sum(deltas[-4:]) == 0: - return False - - if len(deltas) >= 3 and deltas[-1] > 0 and deltas[-1] == deltas[-2] and deltas[-2] == deltas[-3]: - diff = self._report_diff(growth) - raise LeakCheckError('refcount increased by %r\n%s' % (deltas, diff)) - - # OK, we don't know for sure yet. Let's search for more - if sum(deltas[-3:]) <= 0 or sum(deltas[-4:]) <= 0 or deltas[-4:].count(0) >= 2: - # this is suspicious, so give a few more runs - limit = 11 - else: - limit = 7 - if len(deltas) >= limit: - raise LeakCheckError('refcount increased by %r\n%s' - % (deltas, - self._report_diff(growth))) - - # We couldn't decide yet, keep going - return True - - def __call__(self, args, kwargs): - for _ in range(3): - gc.collect() - - expect_failure = getattr(self.function, 'fails_leakcheck', False) - if expect_failure: - self.testcase.expect_greenlet_leak = True - self.ignored_types = getattr(self.function, "leakcheck_ignore_types", ()) - - # Capture state before; the incremental will be - # updated by each call to _growth_after - growth = self._growth() - - try: - while self._check_deltas(growth): - self._run_test(args, kwargs) - - growth = self._growth_after() - - self.deltas.append(sum((stat[2] for stat in growth))) - except LeakCheckError: - if not expect_failure: - raise - else: - if expect_failure: - raise LeakCheckError("Expected %s to leak but it did not." 
% (self.function,)) - -def wrap_refcount(method): - if getattr(method, 'ignore_leakcheck', False) or SKIP_LEAKCHECKS: - return method - - @wraps(method) - def wrapper(self, *args, **kwargs): # pylint:disable=too-many-branches - if getattr(self, 'ignore_leakcheck', False): - raise unittest.SkipTest("This class ignored during leakchecks") - if ONLY_FAILING_LEAKCHECKS and not getattr(method, 'fails_leakcheck', False): - raise unittest.SkipTest("Only running tests that fail leakchecks.") - return _RefCountChecker(self, method)(args, kwargs) - - return wrapper diff --git a/myenv/Lib/site-packages/greenlet/tests/test_contextvars.py b/myenv/Lib/site-packages/greenlet/tests/test_contextvars.py deleted file mode 100644 index 9a16f67..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_contextvars.py +++ /dev/null @@ -1,310 +0,0 @@ -from __future__ import print_function - -import gc -import sys -import unittest - -from functools import partial -from unittest import skipUnless -from unittest import skipIf - -from greenlet import greenlet -from greenlet import getcurrent -from . import TestCase - - -try: - from contextvars import Context - from contextvars import ContextVar - from contextvars import copy_context - # From the documentation: - # - # Important: Context Variables should be created at the top module - # level and never in closures. Context objects hold strong - # references to context variables which prevents context variables - # from being properly garbage collected. - ID_VAR = ContextVar("id", default=None) - VAR_VAR = ContextVar("var", default=None) - ContextVar = None -except ImportError: - Context = ContextVar = copy_context = None - -# We don't support testing if greenlet's built-in context var support is disabled. -@skipUnless(Context is not None, "ContextVar not supported") -class ContextVarsTests(TestCase): - def _new_ctx_run(self, *args, **kwargs): - return copy_context().run(*args, **kwargs) - - def _increment(self, greenlet_id, callback, counts, expect): - ctx_var = ID_VAR - if expect is None: - self.assertIsNone(ctx_var.get()) - else: - self.assertEqual(ctx_var.get(), expect) - ctx_var.set(greenlet_id) - for _ in range(2): - counts[ctx_var.get()] += 1 - callback() - - def _test_context(self, propagate_by): - # pylint:disable=too-many-branches - ID_VAR.set(0) - - callback = getcurrent().switch - counts = dict((i, 0) for i in range(5)) - - lets = [ - greenlet(partial( - partial( - copy_context().run, - self._increment - ) if propagate_by == "run" else self._increment, - greenlet_id=i, - callback=callback, - counts=counts, - expect=( - i - 1 if propagate_by == "share" else - 0 if propagate_by in ("set", "run") else None - ) - )) - for i in range(1, 5) - ] - - for let in lets: - if propagate_by == "set": - let.gr_context = copy_context() - elif propagate_by == "share": - let.gr_context = getcurrent().gr_context - - for i in range(2): - counts[ID_VAR.get()] += 1 - for let in lets: - let.switch() - - if propagate_by == "run": - # Must leave each context.run() in reverse order of entry - for let in reversed(lets): - let.switch() - else: - # No context.run(), so fine to exit in any order. - for let in lets: - let.switch() - - for let in lets: - self.assertTrue(let.dead) - # When using run(), we leave the run() as the greenlet dies, - # and there's no context "underneath". When not using run(), - # gr_context still reflects the context the greenlet was - # running in. 
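
# [Editorial sketch, not part of the deleted test file] A standalone
# illustration of the behavior the comment above describes, assuming
# greenlet's public context-var support (``gr_context``): when the context is
# propagated by assigning the attribute rather than via ``Context.run()``,
# the dead greenlet's ``gr_context`` still reflects the context it ran in.
from contextvars import ContextVar, copy_context
from greenlet import greenlet as _glet, getcurrent as _getcurrent

_VAR = ContextVar("_VAR", default=None)

def _child():
    _VAR.set(1)                      # stored in the context the greenlet carries
    _getcurrent().parent.switch()    # pause so the parent can inspect us

_g = _glet(_child)
_g.gr_context = copy_context()       # propagate by attribute, not Context.run()
_g.switch()                          # start; _child pauses after setting _VAR
assert _g.gr_context[_VAR] == 1      # Context supports mapping access
_g.switch()                          # let _child fall off the end
assert _g.dead
assert _g.gr_context is not None     # preserved, unlike the Context.run() case
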
- if propagate_by == 'run': - self.assertIsNone(let.gr_context) - else: - self.assertIsNotNone(let.gr_context) - - - if propagate_by == "share": - self.assertEqual(counts, {0: 1, 1: 1, 2: 1, 3: 1, 4: 6}) - else: - self.assertEqual(set(counts.values()), set([2])) - - def test_context_propagated_by_context_run(self): - self._new_ctx_run(self._test_context, "run") - - def test_context_propagated_by_setting_attribute(self): - self._new_ctx_run(self._test_context, "set") - - def test_context_not_propagated(self): - self._new_ctx_run(self._test_context, None) - - def test_context_shared(self): - self._new_ctx_run(self._test_context, "share") - - def test_break_ctxvars(self): - let1 = greenlet(copy_context().run) - let2 = greenlet(copy_context().run) - let1.switch(getcurrent().switch) - let2.switch(getcurrent().switch) - # Since let2 entered the current context and let1 exits its own, the - # interpreter emits: - # RuntimeError: cannot exit context: thread state references a different context object - let1.switch() - - def test_not_broken_if_using_attribute_instead_of_context_run(self): - let1 = greenlet(getcurrent().switch) - let2 = greenlet(getcurrent().switch) - let1.gr_context = copy_context() - let2.gr_context = copy_context() - let1.switch() - let2.switch() - let1.switch() - let2.switch() - - def test_context_assignment_while_running(self): - # pylint:disable=too-many-statements - ID_VAR.set(None) - - def target(): - self.assertIsNone(ID_VAR.get()) - self.assertIsNone(gr.gr_context) - - # Context is created on first use - ID_VAR.set(1) - self.assertIsInstance(gr.gr_context, Context) - self.assertEqual(ID_VAR.get(), 1) - self.assertEqual(gr.gr_context[ID_VAR], 1) - - # Clearing the context makes it get re-created as another - # empty context when next used - old_context = gr.gr_context - gr.gr_context = None # assign None while running - self.assertIsNone(ID_VAR.get()) - self.assertIsNone(gr.gr_context) - ID_VAR.set(2) - self.assertIsInstance(gr.gr_context, Context) - self.assertEqual(ID_VAR.get(), 2) - self.assertEqual(gr.gr_context[ID_VAR], 2) - - new_context = gr.gr_context - getcurrent().parent.switch((old_context, new_context)) - # parent switches us back to old_context - - self.assertEqual(ID_VAR.get(), 1) - gr.gr_context = new_context # assign non-None while running - self.assertEqual(ID_VAR.get(), 2) - - getcurrent().parent.switch() - # parent switches us back to no context - self.assertIsNone(ID_VAR.get()) - self.assertIsNone(gr.gr_context) - gr.gr_context = old_context - self.assertEqual(ID_VAR.get(), 1) - - getcurrent().parent.switch() - # parent switches us back to no context - self.assertIsNone(ID_VAR.get()) - self.assertIsNone(gr.gr_context) - - gr = greenlet(target) - - with self.assertRaisesRegex(AttributeError, "can't delete context attribute"): - del gr.gr_context - - self.assertIsNone(gr.gr_context) - old_context, new_context = gr.switch() - self.assertIs(new_context, gr.gr_context) - self.assertEqual(old_context[ID_VAR], 1) - self.assertEqual(new_context[ID_VAR], 2) - self.assertEqual(new_context.run(ID_VAR.get), 2) - gr.gr_context = old_context # assign non-None while suspended - gr.switch() - self.assertIs(gr.gr_context, new_context) - gr.gr_context = None # assign None while suspended - gr.switch() - self.assertIs(gr.gr_context, old_context) - gr.gr_context = None - gr.switch() - self.assertIsNone(gr.gr_context) - - # Make sure there are no reference leaks - gr = None - gc.collect() - self.assertEqual(sys.getrefcount(old_context), 2) - 
self.assertEqual(sys.getrefcount(new_context), 2) - - def test_context_assignment_different_thread(self): - import threading - VAR_VAR.set(None) - ctx = Context() - - is_running = threading.Event() - should_suspend = threading.Event() - did_suspend = threading.Event() - should_exit = threading.Event() - holder = [] - - def greenlet_in_thread_fn(): - VAR_VAR.set(1) - is_running.set() - should_suspend.wait(10) - VAR_VAR.set(2) - getcurrent().parent.switch() - holder.append(VAR_VAR.get()) - - def thread_fn(): - gr = greenlet(greenlet_in_thread_fn) - gr.gr_context = ctx - holder.append(gr) - gr.switch() - did_suspend.set() - should_exit.wait(10) - gr.switch() - del gr - greenlet() # trigger cleanup - - thread = threading.Thread(target=thread_fn, daemon=True) - thread.start() - is_running.wait(10) - gr = holder[0] - - # Can't access or modify context if the greenlet is running - # in a different thread - with self.assertRaisesRegex(ValueError, "running in a different"): - getattr(gr, 'gr_context') - with self.assertRaisesRegex(ValueError, "running in a different"): - gr.gr_context = None - - should_suspend.set() - did_suspend.wait(10) - - # OK to access and modify context if greenlet is suspended - self.assertIs(gr.gr_context, ctx) - self.assertEqual(gr.gr_context[VAR_VAR], 2) - gr.gr_context = None - - should_exit.set() - thread.join(10) - - self.assertEqual(holder, [gr, None]) - - # Context can still be accessed/modified when greenlet is dead: - self.assertIsNone(gr.gr_context) - gr.gr_context = ctx - self.assertIs(gr.gr_context, ctx) - - # Otherwise we leak greenlets on some platforms. - # XXX: Should be able to do this automatically - del holder[:] - gr = None - thread = None - - def test_context_assignment_wrong_type(self): - g = greenlet() - with self.assertRaisesRegex(TypeError, - "greenlet context must be a contextvars.Context or None"): - g.gr_context = self - - -@skipIf(Context is not None, "ContextVar supported") -class NoContextVarsTests(TestCase): - def test_contextvars_errors(self): - let1 = greenlet(getcurrent().switch) - self.assertFalse(hasattr(let1, 'gr_context')) - with self.assertRaises(AttributeError): - getattr(let1, 'gr_context') - - with self.assertRaises(AttributeError): - let1.gr_context = None - - let1.switch() - - with self.assertRaises(AttributeError): - getattr(let1, 'gr_context') - - with self.assertRaises(AttributeError): - let1.gr_context = None - - del let1 - - -if __name__ == '__main__': - unittest.main() diff --git a/myenv/Lib/site-packages/greenlet/tests/test_cpp.py b/myenv/Lib/site-packages/greenlet/tests/test_cpp.py deleted file mode 100644 index 2d0cc9c..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_cpp.py +++ /dev/null @@ -1,73 +0,0 @@ -from __future__ import print_function -from __future__ import absolute_import - -import subprocess -import unittest - -import greenlet -from . import _test_extension_cpp -from . import TestCase -from . 
import WIN - -class CPPTests(TestCase): - def test_exception_switch(self): - greenlets = [] - for i in range(4): - g = greenlet.greenlet(_test_extension_cpp.test_exception_switch) - g.switch(i) - greenlets.append(g) - for i, g in enumerate(greenlets): - self.assertEqual(g.switch(), i) - - def _do_test_unhandled_exception(self, target): - import os - import sys - script = os.path.join( - os.path.dirname(__file__), - 'fail_cpp_exception.py', - ) - args = [sys.executable, script, target.__name__ if not isinstance(target, str) else target] - __traceback_info__ = args - with self.assertRaises(subprocess.CalledProcessError) as exc: - subprocess.check_output( - args, - encoding='utf-8', - stderr=subprocess.STDOUT - ) - - ex = exc.exception - expected_exit = self.get_expected_returncodes_for_aborted_process() - self.assertIn(ex.returncode, expected_exit) - self.assertIn('fail_cpp_exception is running', ex.output) - return ex.output - - - def test_unhandled_nonstd_exception_aborts(self): - # verify that plain unhandled throw aborts - self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_nonstd) - - def test_unhandled_std_exception_aborts(self): - # verify that plain unhandled throw aborts - self._do_test_unhandled_exception(_test_extension_cpp.test_exception_throw_std) - - @unittest.skipIf(WIN, "XXX: This does not crash on Windows") - # Meaning the exception is getting lost somewhere... - def test_unhandled_std_exception_as_greenlet_function_aborts(self): - # verify that plain unhandled throw aborts - output = self._do_test_unhandled_exception('run_as_greenlet_target') - self.assertIn( - # We really expect this to be prefixed with "greenlet: Unhandled C++ exception:" - # as added by our handler for std::exception (see TUserGreenlet.cpp), but - # that's not correct everywhere --- our handler never runs before std::terminate - # gets called (for example, on arm32). - 'Thrown from an extension.', - output - ) - - def test_unhandled_exception_in_greenlet_aborts(self): - # verify that unhandled throw called in greenlet aborts too - self._do_test_unhandled_exception('run_unhandled_exception_in_greenlet_aborts') - - -if __name__ == '__main__': - unittest.main() diff --git a/myenv/Lib/site-packages/greenlet/tests/test_extension_interface.py b/myenv/Lib/site-packages/greenlet/tests/test_extension_interface.py deleted file mode 100644 index 34b6656..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_extension_interface.py +++ /dev/null @@ -1,115 +0,0 @@ -from __future__ import print_function -from __future__ import absolute_import - -import sys - -import greenlet -from . import _test_extension -from . import TestCase - -# pylint:disable=c-extension-no-member - -class CAPITests(TestCase): - def test_switch(self): - self.assertEqual( - 50, _test_extension.test_switch(greenlet.greenlet(lambda: 50))) - - def test_switch_kwargs(self): - def adder(x, y): - return x * y - g = greenlet.greenlet(adder) - self.assertEqual(6, _test_extension.test_switch_kwargs(g, x=3, y=2)) - - def test_setparent(self): - # pylint:disable=disallowed-name - def foo(): - def bar(): - greenlet.getcurrent().parent.switch() - - # This final switch should go back to the main greenlet, since - # the test_setparent() function in the C extension should have - # reparented this greenlet. 
- greenlet.getcurrent().parent.switch() - raise AssertionError("Should never have reached this code") - child = greenlet.greenlet(bar) - child.switch() - greenlet.getcurrent().parent.switch(child) - greenlet.getcurrent().parent.throw( - AssertionError("Should never reach this code")) - foo_child = greenlet.greenlet(foo).switch() - self.assertEqual(None, _test_extension.test_setparent(foo_child)) - - def test_getcurrent(self): - _test_extension.test_getcurrent() - - def test_new_greenlet(self): - self.assertEqual(-15, _test_extension.test_new_greenlet(lambda: -15)) - - def test_raise_greenlet_dead(self): - self.assertRaises( - greenlet.GreenletExit, _test_extension.test_raise_dead_greenlet) - - def test_raise_greenlet_error(self): - self.assertRaises( - greenlet.error, _test_extension.test_raise_greenlet_error) - - def test_throw(self): - seen = [] - - def foo(): # pylint:disable=disallowed-name - try: - greenlet.getcurrent().parent.switch() - except ValueError: - seen.append(sys.exc_info()[1]) - except greenlet.GreenletExit: - raise AssertionError - g = greenlet.greenlet(foo) - g.switch() - _test_extension.test_throw(g) - self.assertEqual(len(seen), 1) - self.assertTrue( - isinstance(seen[0], ValueError), - "ValueError was not raised in foo()") - self.assertEqual( - str(seen[0]), - 'take that sucka!', - "message doesn't match") - - def test_non_traceback_param(self): - with self.assertRaises(TypeError) as exc: - _test_extension.test_throw_exact( - greenlet.getcurrent(), - Exception, - Exception(), - self - ) - self.assertEqual(str(exc.exception), - "throw() third argument must be a traceback object") - - def test_instance_of_wrong_type(self): - with self.assertRaises(TypeError) as exc: - _test_extension.test_throw_exact( - greenlet.getcurrent(), - Exception(), - BaseException(), - None, - ) - - self.assertEqual(str(exc.exception), - "instance exception may not have a separate value") - - def test_not_throwable(self): - with self.assertRaises(TypeError) as exc: - _test_extension.test_throw_exact( - greenlet.getcurrent(), - "abc", - None, - None, - ) - self.assertEqual(str(exc.exception), - "exceptions must be classes, or instances, not str") - - -if __name__ == '__main__': - import unittest - unittest.main() diff --git a/myenv/Lib/site-packages/greenlet/tests/test_gc.py b/myenv/Lib/site-packages/greenlet/tests/test_gc.py deleted file mode 100644 index 994addb..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_gc.py +++ /dev/null @@ -1,86 +0,0 @@ -import gc - -import weakref - -import greenlet - - -from . import TestCase -from .leakcheck import fails_leakcheck -# These only work with greenlet gc support -# which is no longer optional. 
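
# [Editorial sketch, not part of the deleted test file] What "greenlet gc
# support" buys, mirroring test_circular_greenlet below: a greenlet caught in
# a reference cycle is still reclaimed by the cycle collector.
import gc as _gc
import weakref as _weakref
import greenlet as _greenlet

class _Cyclic(_greenlet.greenlet):
    pass

_o = _Cyclic()
_o.self_ref = _o                 # cycle: the greenlet references itself
_ref = _weakref.ref(_o)
del _o
_gc.collect()                    # the cycle collector can traverse greenlets
assert _ref() is None
assert not _gc.garbage
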
-assert greenlet.GREENLET_USE_GC - -class GCTests(TestCase): - def test_dead_circular_ref(self): - o = weakref.ref(greenlet.greenlet(greenlet.getcurrent).switch()) - gc.collect() - if o() is not None: - import sys - print("O IS NOT NONE.", sys.getrefcount(o())) - self.assertIsNone(o()) - self.assertFalse(gc.garbage, gc.garbage) - - def test_circular_greenlet(self): - class circular_greenlet(greenlet.greenlet): - self = None - o = circular_greenlet() - o.self = o - o = weakref.ref(o) - gc.collect() - self.assertIsNone(o()) - self.assertFalse(gc.garbage, gc.garbage) - - def test_inactive_ref(self): - class inactive_greenlet(greenlet.greenlet): - def __init__(self): - greenlet.greenlet.__init__(self, run=self.run) - - def run(self): - pass - o = inactive_greenlet() - o = weakref.ref(o) - gc.collect() - self.assertIsNone(o()) - self.assertFalse(gc.garbage, gc.garbage) - - @fails_leakcheck - def test_finalizer_crash(self): - # This test is designed to crash when active greenlets - # are made garbage collectable, until the underlying - # problem is resolved. How does it work: - # - order of object creation is important - # - array is created first, so it is moved to unreachable first - # - we create a cycle between a greenlet and this array - # - we create an object that participates in gc, is only - # referenced by a greenlet, and would corrupt gc lists - # on destruction, the easiest is to use an object with - # a finalizer - # - because array is the first object in unreachable it is - # cleared first, which causes all references to greenlet - # to disappear and causes greenlet to be destroyed, but since - # it is still live it causes a switch during gc, which causes - # an object with finalizer to be destroyed, which causes stack - # corruption and then a crash - - class object_with_finalizer(object): - def __del__(self): - pass - array = [] - parent = greenlet.getcurrent() - def greenlet_body(): - greenlet.getcurrent().object = object_with_finalizer() - try: - parent.switch() - except greenlet.GreenletExit: - print("Got greenlet exit!") - finally: - del greenlet.getcurrent().object - g = greenlet.greenlet(greenlet_body) - g.array = array - array.append(g) - g.switch() - del array - del g - greenlet.getcurrent() - gc.collect() diff --git a/myenv/Lib/site-packages/greenlet/tests/test_generator.py b/myenv/Lib/site-packages/greenlet/tests/test_generator.py deleted file mode 100644 index ca4a644..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_generator.py +++ /dev/null @@ -1,59 +0,0 @@ - -from greenlet import greenlet - -from . 
import TestCase - -class genlet(greenlet): - parent = None - def __init__(self, *args, **kwds): - self.args = args - self.kwds = kwds - - def run(self): - fn, = self.fn - fn(*self.args, **self.kwds) - - def __iter__(self): - return self - - def __next__(self): - self.parent = greenlet.getcurrent() - result = self.switch() - if self: - return result - - raise StopIteration - - next = __next__ - - -def Yield(value): - g = greenlet.getcurrent() - while not isinstance(g, genlet): - if g is None: - raise RuntimeError('yield outside a genlet') - g = g.parent - g.parent.switch(value) - - -def generator(func): - class Generator(genlet): - fn = (func,) - return Generator - -# ____________________________________________________________ - - -class GeneratorTests(TestCase): - def test_generator(self): - seen = [] - - def g(n): - for i in range(n): - seen.append(i) - Yield(i) - g = generator(g) - for _ in range(3): - for j in g(5): - seen.append(j) - self.assertEqual(seen, 3 * [0, 0, 1, 1, 2, 2, 3, 3, 4, 4]) diff --git a/myenv/Lib/site-packages/greenlet/tests/test_generator_nested.py b/myenv/Lib/site-packages/greenlet/tests/test_generator_nested.py deleted file mode 100644 index 8d752a6..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_generator_nested.py +++ /dev/null @@ -1,168 +0,0 @@ - -from greenlet import greenlet -from . import TestCase -from .leakcheck import fails_leakcheck - -class genlet(greenlet): - parent = None - def __init__(self, *args, **kwds): - self.args = args - self.kwds = kwds - self.child = None - - def run(self): - # Note the function is packed in a tuple - # to avoid creating a bound method for it. - fn, = self.fn - fn(*self.args, **self.kwds) - - def __iter__(self): - return self - - def set_child(self, child): - self.child = child - - def __next__(self): - if self.child: - child = self.child - while child.child: - tmp = child - child = child.child - tmp.child = None - - result = child.switch() - else: - self.parent = greenlet.getcurrent() - result = self.switch() - - if self: - return result - - raise StopIteration - - next = __next__ - -def Yield(value, level=1): - g = greenlet.getcurrent() - - while level != 0: - if not isinstance(g, genlet): - raise RuntimeError('yield outside a genlet') - if level > 1: - g.parent.set_child(g) - g = g.parent - level -= 1 - - g.switch(value) - - -def Genlet(func): - class TheGenlet(genlet): - fn = (func,) - return TheGenlet - -# ____________________________________________________________ - - -def g1(n, seen): - for i in range(n): - seen.append(i + 1) - yield i - - -def g2(n, seen): - for i in range(n): - seen.append(i + 1) - Yield(i) - -g2 = Genlet(g2) - - -def nested(i): - Yield(i) - - -def g3(n, seen): - for i in range(n): - seen.append(i + 1) - nested(i) -g3 = Genlet(g3) - - -def a(n): - if n == 0: - return - for ii in ax(n - 1): - Yield(ii) - Yield(n) -ax = Genlet(a) - - -def perms(l): - if len(l) > 1: - for e in l: - # No syntactical sugar for generator expressions - x = [Yield([e] + p) for p in perms([x for x in l if x != e])] - assert x - else: - Yield(l) -perms = Genlet(perms) - - -def gr1(n): - for ii in range(1, n): - Yield(ii) - Yield(ii * ii, 2) - -gr1 = Genlet(gr1) - - -def gr2(n, seen): - for ii in gr1(n): - seen.append(ii) - -gr2 = Genlet(gr2) - - -class NestedGeneratorTests(TestCase): - def test_layered_genlets(self): - seen = [] - for ii in gr2(5, seen): - seen.append(ii) - self.assertEqual(seen, [1, 1, 2, 4, 3, 9, 4, 16]) - - @fails_leakcheck - def test_permutations(self): - gen_perms = perms(list(range(4))) - 
permutations = list(gen_perms) - self.assertEqual(len(permutations), 4 * 3 * 2 * 1) - self.assertIn([0, 1, 2, 3], permutations) - self.assertIn([3, 2, 1, 0], permutations) - res = [] - for ii in zip(perms(list(range(4))), perms(list(range(3)))): - res.append(ii) - self.assertEqual( - res, - [([0, 1, 2, 3], [0, 1, 2]), ([0, 1, 3, 2], [0, 2, 1]), - ([0, 2, 1, 3], [1, 0, 2]), ([0, 2, 3, 1], [1, 2, 0]), - ([0, 3, 1, 2], [2, 0, 1]), ([0, 3, 2, 1], [2, 1, 0])]) - # XXX Test to make sure we are working as a generator expression - - def test_genlet_simple(self): - for g in g1, g2, g3: - seen = [] - for _ in range(3): - for j in g(5, seen): - seen.append(j) - self.assertEqual(seen, 3 * [1, 0, 2, 1, 3, 2, 4, 3, 5, 4]) - - def test_genlet_bad(self): - try: - Yield(10) - except RuntimeError: - pass - - def test_nested_genlets(self): - seen = [] - for ii in ax(5): - seen.append(ii) diff --git a/myenv/Lib/site-packages/greenlet/tests/test_greenlet.py b/myenv/Lib/site-packages/greenlet/tests/test_greenlet.py deleted file mode 100644 index c4aabea..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_greenlet.py +++ /dev/null @@ -1,1324 +0,0 @@ -import gc -import sys -import time -import threading -import unittest - -from abc import ABCMeta -from abc import abstractmethod - -import greenlet -from greenlet import greenlet as RawGreenlet -from . import TestCase -from . import RUNNING_ON_MANYLINUX -from . import PY313 -from .leakcheck import fails_leakcheck - - -# We manually manage locks in many tests -# pylint:disable=consider-using-with -# pylint:disable=too-many-public-methods -# This module is quite large. -# TODO: Refactor into separate test files. For example, -# put all the regression tests that used to produce -# crashes in test_greenlet_no_crash; put tests that DO deliberately crash -# the interpreter into test_greenlet_crash. -# pylint:disable=too-many-lines - -class SomeError(Exception): - pass - - -def fmain(seen): - try: - greenlet.getcurrent().parent.switch() - except: - seen.append(sys.exc_info()[0]) - raise - raise SomeError - - -def send_exception(g, exc): - # note: send_exception(g, exc) can be now done with g.throw(exc). - # the purpose of this test is to explicitly check the propagation rules. 
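
# [Editorial sketch, not part of the deleted test file] The modern spelling
# the note above refers to: ``g.throw(exc)`` raises ``exc`` inside the paused
# greenlet, and if the greenlet does not catch it, the exception propagates
# to the greenlet that performed the throw (here, us).
import greenlet as _greenlet

def _waiter():
    _greenlet.getcurrent().parent.switch()   # pause until resumed or thrown into

_g = _greenlet.greenlet(_waiter)
_g.switch()                                  # start; _waiter is now paused
try:
    _g.throw(KeyError)                       # raised at _waiter's switch point
except KeyError:
    pass                                     # unhandled there, so it lands here
assert _g.dead
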
- def crasher(exc): - raise exc - g1 = RawGreenlet(crasher, parent=g) - g1.switch(exc) - - -class TestGreenlet(TestCase): - - def _do_simple_test(self): - lst = [] - - def f(): - lst.append(1) - greenlet.getcurrent().parent.switch() - lst.append(3) - g = RawGreenlet(f) - lst.append(0) - g.switch() - lst.append(2) - g.switch() - lst.append(4) - self.assertEqual(lst, list(range(5))) - - def test_simple(self): - self._do_simple_test() - - def test_switch_no_run_raises_AttributeError(self): - g = RawGreenlet() - with self.assertRaises(AttributeError) as exc: - g.switch() - - self.assertIn("run", str(exc.exception)) - - def test_throw_no_run_raises_AttributeError(self): - g = RawGreenlet() - with self.assertRaises(AttributeError) as exc: - g.throw(SomeError) - - self.assertIn("run", str(exc.exception)) - - def test_parent_equals_None(self): - g = RawGreenlet(parent=None) - self.assertIsNotNone(g) - self.assertIs(g.parent, greenlet.getcurrent()) - - def test_run_equals_None(self): - g = RawGreenlet(run=None) - self.assertIsNotNone(g) - self.assertIsNone(g.run) - - def test_two_children(self): - lst = [] - - def f(): - lst.append(1) - greenlet.getcurrent().parent.switch() - lst.extend([1, 1]) - g = RawGreenlet(f) - h = RawGreenlet(f) - g.switch() - self.assertEqual(len(lst), 1) - h.switch() - self.assertEqual(len(lst), 2) - h.switch() - self.assertEqual(len(lst), 4) - self.assertEqual(h.dead, True) - g.switch() - self.assertEqual(len(lst), 6) - self.assertEqual(g.dead, True) - - def test_two_recursive_children(self): - lst = [] - - def f(): - lst.append('b') - greenlet.getcurrent().parent.switch() - - def g(): - lst.append('a') - g = RawGreenlet(f) - g.switch() - lst.append('c') - - g = RawGreenlet(g) - self.assertEqual(sys.getrefcount(g), 2) - g.switch() - self.assertEqual(lst, ['a', 'b', 'c']) - # Just the one in this frame, plus the one on the stack we pass to the function - self.assertEqual(sys.getrefcount(g), 2) - - def test_threads(self): - success = [] - - def f(): - self._do_simple_test() - success.append(True) - ths = [threading.Thread(target=f) for i in range(10)] - for th in ths: - th.start() - for th in ths: - th.join(10) - self.assertEqual(len(success), len(ths)) - - def test_exception(self): - seen = [] - g1 = RawGreenlet(fmain) - g2 = RawGreenlet(fmain) - g1.switch(seen) - g2.switch(seen) - g2.parent = g1 - - self.assertEqual(seen, []) - #with self.assertRaises(SomeError): - # p("***Switching back") - # g2.switch() - # Creating this as a bound method can reveal bugs that - # are hidden on newer versions of Python that avoid creating - # bound methods for direct expressions; IOW, don't use the `with` - # form! 
- self.assertRaises(SomeError, g2.switch) - self.assertEqual(seen, [SomeError]) - - value = g2.switch() - self.assertEqual(value, ()) - self.assertEqual(seen, [SomeError]) - - value = g2.switch(25) - self.assertEqual(value, 25) - self.assertEqual(seen, [SomeError]) - - - def test_send_exception(self): - seen = [] - g1 = RawGreenlet(fmain) - g1.switch(seen) - self.assertRaises(KeyError, send_exception, g1, KeyError) - self.assertEqual(seen, [KeyError]) - - def test_dealloc(self): - seen = [] - g1 = RawGreenlet(fmain) - g2 = RawGreenlet(fmain) - g1.switch(seen) - g2.switch(seen) - self.assertEqual(seen, []) - del g1 - gc.collect() - self.assertEqual(seen, [greenlet.GreenletExit]) - del g2 - gc.collect() - self.assertEqual(seen, [greenlet.GreenletExit, greenlet.GreenletExit]) - - def test_dealloc_catches_GreenletExit_throws_other(self): - def run(): - try: - greenlet.getcurrent().parent.switch() - except greenlet.GreenletExit: - raise SomeError from None - - g = RawGreenlet(run) - g.switch() - # Destroying the only reference to the greenlet causes it - # to get GreenletExit; when it in turn raises, even though we're the parent - # we don't get the exception, it just gets printed. - # When we run on 3.8 only, we can use sys.unraisablehook - oldstderr = sys.stderr - from io import StringIO - stderr = sys.stderr = StringIO() - try: - del g - finally: - sys.stderr = oldstderr - - v = stderr.getvalue() - self.assertIn("Exception", v) - self.assertIn('ignored', v) - self.assertIn("SomeError", v) - - - @unittest.skipIf( - PY313 and RUNNING_ON_MANYLINUX, - "Sometimes flaky (getting one GreenletExit in the second list)" - # Probably due to funky timing interactions? - # TODO: FIXME Make that work. - ) - - def test_dealloc_other_thread(self): - seen = [] - someref = [] - - bg_glet_created_running_and_no_longer_ref_in_bg = threading.Event() - fg_ref_released = threading.Event() - bg_should_be_clear = threading.Event() - ok_to_exit_bg_thread = threading.Event() - - def f(): - g1 = RawGreenlet(fmain) - g1.switch(seen) - someref.append(g1) - del g1 - gc.collect() - - bg_glet_created_running_and_no_longer_ref_in_bg.set() - fg_ref_released.wait(3) - - RawGreenlet() # trigger release - bg_should_be_clear.set() - ok_to_exit_bg_thread.wait(3) - RawGreenlet() # One more time - - t = threading.Thread(target=f) - t.start() - bg_glet_created_running_and_no_longer_ref_in_bg.wait(10) - - self.assertEqual(seen, []) - self.assertEqual(len(someref), 1) - del someref[:] - gc.collect() - # g1 is not released immediately because it's from another thread - self.assertEqual(seen, []) - fg_ref_released.set() - bg_should_be_clear.wait(3) - try: - self.assertEqual(seen, [greenlet.GreenletExit]) - finally: - ok_to_exit_bg_thread.set() - t.join(10) - del seen[:] - del someref[:] - - def test_frame(self): - def f1(): - f = sys._getframe(0) # pylint:disable=protected-access - self.assertEqual(f.f_back, None) - greenlet.getcurrent().parent.switch(f) - return "meaning of life" - g = RawGreenlet(f1) - frame = g.switch() - self.assertTrue(frame is g.gr_frame) - self.assertTrue(g) - - from_g = g.switch() - self.assertFalse(g) - self.assertEqual(from_g, 'meaning of life') - self.assertEqual(g.gr_frame, None) - - def test_thread_bug(self): - def runner(x): - g = RawGreenlet(lambda: time.sleep(x)) - g.switch() - t1 = threading.Thread(target=runner, args=(0.2,)) - t2 = threading.Thread(target=runner, args=(0.3,)) - t1.start() - t2.start() - t1.join(10) - t2.join(10) - - def test_switch_kwargs(self): - def run(a, b): - self.assertEqual(a, 4) 
- self.assertEqual(b, 2) - return 42 - x = RawGreenlet(run).switch(a=4, b=2) - self.assertEqual(x, 42) - - def test_switch_kwargs_to_parent(self): - def run(x): - greenlet.getcurrent().parent.switch(x=x) - greenlet.getcurrent().parent.switch(2, x=3) - return x, x ** 2 - g = RawGreenlet(run) - self.assertEqual({'x': 3}, g.switch(3)) - self.assertEqual(((2,), {'x': 3}), g.switch()) - self.assertEqual((3, 9), g.switch()) - - def test_switch_to_another_thread(self): - data = {} - created_event = threading.Event() - done_event = threading.Event() - - def run(): - data['g'] = RawGreenlet(lambda: None) - created_event.set() - done_event.wait(10) - thread = threading.Thread(target=run) - thread.start() - created_event.wait(10) - with self.assertRaises(greenlet.error): - data['g'].switch() - done_event.set() - thread.join(10) - # XXX: Should handle this automatically - data.clear() - - def test_exc_state(self): - def f(): - try: - raise ValueError('fun') - except: # pylint:disable=bare-except - exc_info = sys.exc_info() - RawGreenlet(h).switch() - self.assertEqual(exc_info, sys.exc_info()) - - def h(): - self.assertEqual(sys.exc_info(), (None, None, None)) - - RawGreenlet(f).switch() - - def test_instance_dict(self): - def f(): - greenlet.getcurrent().test = 42 - def deldict(g): - del g.__dict__ - def setdict(g, value): - g.__dict__ = value - g = RawGreenlet(f) - self.assertEqual(g.__dict__, {}) - g.switch() - self.assertEqual(g.test, 42) - self.assertEqual(g.__dict__, {'test': 42}) - g.__dict__ = g.__dict__ - self.assertEqual(g.__dict__, {'test': 42}) - self.assertRaises(TypeError, deldict, g) - self.assertRaises(TypeError, setdict, g, 42) - - def test_running_greenlet_has_no_run(self): - has_run = [] - def func(): - has_run.append( - hasattr(greenlet.getcurrent(), 'run') - ) - - g = RawGreenlet(func) - g.switch() - self.assertEqual(has_run, [False]) - - def test_deepcopy(self): - import copy - self.assertRaises(TypeError, copy.copy, RawGreenlet()) - self.assertRaises(TypeError, copy.deepcopy, RawGreenlet()) - - def test_parent_restored_on_kill(self): - hub = RawGreenlet(lambda: None) - main = greenlet.getcurrent() - result = [] - def worker(): - try: - # Wait to be killed by going back to the test. - main.switch() - except greenlet.GreenletExit: - # Resurrect and switch to parent - result.append(greenlet.getcurrent().parent) - result.append(greenlet.getcurrent()) - hub.switch() - g = RawGreenlet(worker, parent=hub) - g.switch() - # delete the only reference, thereby raising GreenletExit - del g - self.assertTrue(result) - self.assertIs(result[0], main) - self.assertIs(result[1].parent, hub) - # Delete them, thereby breaking the cycle between the greenlet - # and the frame, which otherwise would never be collectable - # XXX: We should be able to automatically fix this. 
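
# [Editorial sketch, not part of the deleted test file] The kill-on-dealloc
# behavior this test relies on, shown in isolation: dropping the last
# reference to a started but unfinished greenlet throws GreenletExit into it.
import gc as _gc
import greenlet as _greenlet

_seen = []

def _worker():
    try:
        _greenlet.getcurrent().parent.switch()   # pause here
    except _greenlet.GreenletExit:
        _seen.append("GreenletExit")             # delivered during deallocation

_w = _greenlet.greenlet(_worker)
_w.switch()       # start the worker; it pauses in parent.switch()
del _w            # last reference gone: GreenletExit is thrown into it
_gc.collect()     # be safe on non-refcounting interpreters
assert _seen == ["GreenletExit"]
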
- del result[:] - hub = None - main = None - - def test_parent_return_failure(self): - # No run causes AttributeError on switch - g1 = RawGreenlet() - # Greenlet that implicitly switches to parent - g2 = RawGreenlet(lambda: None, parent=g1) - # AttributeError should propagate to us, no fatal errors - with self.assertRaises(AttributeError): - g2.switch() - - def test_throw_exception_not_lost(self): - class mygreenlet(RawGreenlet): - def __getattribute__(self, name): - try: - raise Exception # pylint:disable=broad-exception-raised - except: # pylint:disable=bare-except - pass - return RawGreenlet.__getattribute__(self, name) - g = mygreenlet(lambda: None) - self.assertRaises(SomeError, g.throw, SomeError()) - - @fails_leakcheck - def _do_test_throw_to_dead_thread_doesnt_crash(self, wait_for_cleanup=False): - result = [] - def worker(): - greenlet.getcurrent().parent.switch() - - def creator(): - g = RawGreenlet(worker) - g.switch() - result.append(g) - if wait_for_cleanup: - # Let this greenlet eventually be cleaned up. - g.switch() - greenlet.getcurrent() - t = threading.Thread(target=creator) - t.start() - t.join(10) - del t - # But, depending on the operating system, the thread - # deallocator may not actually have run yet! So we can't be - # sure about the error message unless we wait. - if wait_for_cleanup: - self.wait_for_pending_cleanups() - with self.assertRaises(greenlet.error) as exc: - result[0].throw(SomeError) - - if not wait_for_cleanup: - s = str(exc.exception) - self.assertTrue( - s == "cannot switch to a different thread (which happens to have exited)" - or 'Cannot switch' in s - ) - else: - self.assertEqual( - str(exc.exception), - "cannot switch to a different thread (which happens to have exited)", - ) - - if hasattr(result[0].gr_frame, 'clear'): - # The frame is actually executing (it thinks), we can't clear it. - with self.assertRaises(RuntimeError): - result[0].gr_frame.clear() - # Unfortunately, this doesn't actually clear the references, they're in the - # fast local array. - if not wait_for_cleanup: - # f_locals has no clear method in Python 3.13 - if hasattr(result[0].gr_frame.f_locals, 'clear'): - result[0].gr_frame.f_locals.clear() - else: - self.assertIsNone(result[0].gr_frame) - - del creator - worker = None - del result[:] - # XXX: we ought to be able to automatically fix this. 
- # See issue 252 - self.expect_greenlet_leak = True # direct us not to wait for it to go away - - @fails_leakcheck - def test_throw_to_dead_thread_doesnt_crash(self): - self._do_test_throw_to_dead_thread_doesnt_crash() - - def test_throw_to_dead_thread_doesnt_crash_wait(self): - self._do_test_throw_to_dead_thread_doesnt_crash(True) - - @fails_leakcheck - def test_recursive_startup(self): - class convoluted(RawGreenlet): - def __init__(self): - RawGreenlet.__init__(self) - self.count = 0 - def __getattribute__(self, name): - if name == 'run' and self.count == 0: - self.count = 1 - self.switch(43) - return RawGreenlet.__getattribute__(self, name) - def run(self, value): - while True: - self.parent.switch(value) - g = convoluted() - self.assertEqual(g.switch(42), 43) - # Exits the running greenlet, otherwise it leaks - # XXX: We should be able to automatically fix this - #g.throw(greenlet.GreenletExit) - #del g - self.expect_greenlet_leak = True - - def test_threaded_updatecurrent(self): - # released when main thread should execute - lock1 = threading.Lock() - lock1.acquire() - # released when another thread should execute - lock2 = threading.Lock() - lock2.acquire() - class finalized(object): - def __del__(self): - # happens while in green_updatecurrent() in main greenlet - # should be very careful not to accidentally call it again - # at the same time we must make sure another thread executes - lock2.release() - lock1.acquire() - # now ts_current belongs to another thread - def deallocator(): - greenlet.getcurrent().parent.switch() - def fthread(): - lock2.acquire() - greenlet.getcurrent() - del g[0] - lock1.release() - lock2.acquire() - greenlet.getcurrent() - lock1.release() - main = greenlet.getcurrent() - g = [RawGreenlet(deallocator)] - g[0].bomb = finalized() - g[0].switch() - t = threading.Thread(target=fthread) - t.start() - # let another thread grab ts_current and deallocate g[0] - lock2.release() - lock1.acquire() - # this is the corner stone - # getcurrent() will notice that ts_current belongs to another thread - # and start the update process, which would notice that g[0] should - # be deallocated, and that will execute an object's finalizer. Now, - # that object will let another thread run so it can grab ts_current - # again, which would likely crash the interpreter if there's no - # check for this case at the end of green_updatecurrent(). This test - # passes if getcurrent() returns correct result, but it's likely - # to randomly crash if it's not anyway. - self.assertEqual(greenlet.getcurrent(), main) - # wait for another thread to complete, just in case - t.join(10) - - def test_dealloc_switch_args_not_lost(self): - seen = [] - def worker(): - # wait for the value - value = greenlet.getcurrent().parent.switch() - # delete all references to ourself - del worker[0] - initiator.parent = greenlet.getcurrent().parent - # switch to main with the value, but because - # ts_current is the last reference to us we - # return here immediately, where we resurrect ourself. 
- try: - greenlet.getcurrent().parent.switch(value) - finally: - seen.append(greenlet.getcurrent()) - def initiator(): - return 42 # implicitly falls thru to parent - - worker = [RawGreenlet(worker)] - - worker[0].switch() # prime worker - initiator = RawGreenlet(initiator, worker[0]) - value = initiator.switch() - self.assertTrue(seen) - self.assertEqual(value, 42) - - def test_tuple_subclass(self): - # The point of this test is to see what happens when a custom - # tuple subclass is used as an object passed directly to the C - # function ``green_switch``; part of ``green_switch`` checks - # the ``len()`` of the ``args`` tuple, and that can call back - # into Python. Here, when it calls back into Python, we - # recursively enter ``green_switch`` again. - - # This test is really only relevant on Python 2. The builtin - # `apply` function directly passes the given args tuple object - # to the underlying function, whereas the Python 3 version - # unpacks and repacks into an actual tuple. This could still - # happen using the C API on Python 3 though. We should write a - # builtin version of apply() ourself. - def _apply(func, a, k): - func(*a, **k) - - class mytuple(tuple): - def __len__(self): - greenlet.getcurrent().switch() - return tuple.__len__(self) - args = mytuple() - kwargs = dict(a=42) - def switchapply(): - _apply(greenlet.getcurrent().parent.switch, args, kwargs) - g = RawGreenlet(switchapply) - self.assertEqual(g.switch(), kwargs) - - def test_abstract_subclasses(self): - AbstractSubclass = ABCMeta( - 'AbstractSubclass', - (RawGreenlet,), - {'run': abstractmethod(lambda self: None)}) - - class BadSubclass(AbstractSubclass): - pass - - class GoodSubclass(AbstractSubclass): - def run(self): - pass - - GoodSubclass() # should not raise - self.assertRaises(TypeError, BadSubclass) - - def test_implicit_parent_with_threads(self): - if not gc.isenabled(): - return # cannot test with disabled gc - N = gc.get_threshold()[0] - if N < 50: - return # cannot test with such a small N - def attempt(): - lock1 = threading.Lock() - lock1.acquire() - lock2 = threading.Lock() - lock2.acquire() - recycled = [False] - def another_thread(): - lock1.acquire() # wait for gc - greenlet.getcurrent() # update ts_current - lock2.release() # release gc - t = threading.Thread(target=another_thread) - t.start() - class gc_callback(object): - def __del__(self): - lock1.release() - lock2.acquire() - recycled[0] = True - class garbage(object): - def __init__(self): - self.cycle = self - self.callback = gc_callback() - l = [] - x = range(N*2) - current = greenlet.getcurrent() - g = garbage() - for _ in x: - g = None # lose reference to garbage - if recycled[0]: - # gc callback called prematurely - t.join(10) - return False - last = RawGreenlet() - if recycled[0]: - break # yes! gc called in green_new - l.append(last) # increase allocation counter - else: - # gc callback not called when expected - gc.collect() - if recycled[0]: - t.join(10) - return False - self.assertEqual(last.parent, current) - for g in l: - self.assertEqual(g.parent, current) - return True - for _ in range(5): - if attempt(): - break - - def test_issue_245_reference_counting_subclass_no_threads(self): - # https://github.com/python-greenlet/greenlet/issues/245 - # Before the fix, this crashed pretty reliably on - # Python 3.10, at least on macOS; but much less reliably on other - # interpreters (memory layout must have changed). - # The threaded test crashed more reliably on more interpreters. 
- from greenlet import getcurrent - from greenlet import GreenletExit - - class Greenlet(RawGreenlet): - pass - - initial_refs = sys.getrefcount(Greenlet) - # This has to be an instance variable because - # Python 2 raises a SyntaxError if we delete a local - # variable referenced in an inner scope. - self.glets = [] # pylint:disable=attribute-defined-outside-init - - def greenlet_main(): - try: - getcurrent().parent.switch() - except GreenletExit: - self.glets.append(getcurrent()) - - # Before the - for _ in range(10): - Greenlet(greenlet_main).switch() - - del self.glets - self.assertEqual(sys.getrefcount(Greenlet), initial_refs) - - @unittest.skipIf( - PY313 and RUNNING_ON_MANYLINUX, - "The manylinux images appear to hang on this test on 3.13rc2" - # Or perhaps I just got tired of waiting for the 450s timeout. - # Still, it shouldn't take anywhere near that long. Does not reproduce in - # Ubuntu images, on macOS or Windows. - ) - def test_issue_245_reference_counting_subclass_threads(self): - # https://github.com/python-greenlet/greenlet/issues/245 - from threading import Thread - from threading import Event - - from greenlet import getcurrent - - class MyGreenlet(RawGreenlet): - pass - - glets = [] - ref_cleared = Event() - - def greenlet_main(): - getcurrent().parent.switch() - - def thread_main(greenlet_running_event): - mine = MyGreenlet(greenlet_main) - glets.append(mine) - # The greenlets being deleted must be active - mine.switch() - # Don't keep any reference to it in this thread - del mine - # Let main know we published our greenlet. - greenlet_running_event.set() - # Wait for main to let us know the references are - # gone and the greenlet objects no longer reachable - ref_cleared.wait(10) - # The creating thread must call getcurrent() (or a few other - # greenlet APIs) because that's when the thread-local list of dead - # greenlets gets cleared. - getcurrent() - - # We start with 3 references to the subclass: - # - This module - # - Its __mro__ - # - The __subclassess__ attribute of greenlet - # - (If we call gc.get_referents(), we find four entries, including - # some other tuple ``(greenlet)`` that I'm not sure about but must be part - # of the machinery.) - # - # On Python 3.10 it's often enough to just run 3 threads; on Python 2.7, - # more threads are needed, and the results are still - # non-deterministic. Presumably the memory layouts are different - initial_refs = sys.getrefcount(MyGreenlet) - thread_ready_events = [] - for _ in range( - initial_refs + 45 - ): - event = Event() - thread = Thread(target=thread_main, args=(event,)) - thread_ready_events.append(event) - thread.start() - - - for done_event in thread_ready_events: - done_event.wait(10) - - - del glets[:] - ref_cleared.set() - # Let any other thread run; it will crash the interpreter - # if not fixed (or silently corrupt memory and we possibly crash - # later). 
-        self.wait_for_pending_cleanups()
-        self.assertEqual(sys.getrefcount(MyGreenlet), initial_refs)
-
-    def test_falling_off_end_switches_to_unstarted_parent_raises_error(self):
-        def no_args():
-            return 13
-
-        parent_never_started = RawGreenlet(no_args)
-
-        def leaf():
-            return 42
-
-        child = RawGreenlet(leaf, parent_never_started)
-
-        # Because the run function takes no arguments
-        with self.assertRaises(TypeError):
-            child.switch()
-
-    def test_falling_off_end_switches_to_unstarted_parent_works(self):
-        def one_arg(x):
-            return (x, 24)
-
-        parent_never_started = RawGreenlet(one_arg)
-
-        def leaf():
-            return 42
-
-        child = RawGreenlet(leaf, parent_never_started)
-
-        result = child.switch()
-        self.assertEqual(result, (42, 24))
-
-    def test_switch_to_dead_greenlet_with_unstarted_perverse_parent(self):
-        class Parent(RawGreenlet):
-            def __getattribute__(self, name):
-                if name == 'run':
-                    raise SomeError
-
-
-        parent_never_started = Parent()
-        seen = []
-        child = RawGreenlet(lambda: seen.append(42), parent_never_started)
-        # Because we automatically start the parent when the child is
-        # finished
-        with self.assertRaises(SomeError):
-            child.switch()
-
-        self.assertEqual(seen, [42])
-
-        with self.assertRaises(SomeError):
-            child.switch()
-        self.assertEqual(seen, [42])
-
-    def test_switch_to_dead_greenlet_reparent(self):
-        seen = []
-        parent_never_started = RawGreenlet(lambda: seen.append(24))
-        child = RawGreenlet(lambda: seen.append(42))
-
-        child.switch()
-        self.assertEqual(seen, [42])
-
-        child.parent = parent_never_started
-        # This actually is the same as switching to the parent.
-        result = child.switch()
-        self.assertIsNone(result)
-        self.assertEqual(seen, [42, 24])
-
-    def test_can_access_f_back_of_suspended_greenlet(self):
-        # This tests our frame rewriting to work around Python 3.12+ having
-        # some interpreter frames on the C stack. It will crash in the absence
-        # of that logic.
-        main = greenlet.getcurrent()
-
-        def outer():
-            inner()
-
-        def inner():
-            main.switch(sys._getframe(0))
-
-        hub = RawGreenlet(outer)
-        # start it
-        hub.switch()
-
-        # start another greenlet to make sure we aren't relying on
-        # anything in `hub` still being on the C stack
-        unrelated = RawGreenlet(lambda: None)
-        unrelated.switch()
-
-        # now it is suspended
-        self.assertIsNotNone(hub.gr_frame)
-        self.assertEqual(hub.gr_frame.f_code.co_name, "inner")
-        self.assertIsNotNone(hub.gr_frame.f_back)
-        self.assertEqual(hub.gr_frame.f_back.f_code.co_name, "outer")
-        # The next line is what would crash
-        self.assertIsNone(hub.gr_frame.f_back.f_back)
-
-    def test_get_stack_with_nested_c_calls(self):
-        from functools import partial
-        from . import _test_extension_cpp
-
-        def recurse(v):
-            if v > 0:
-                return v * _test_extension_cpp.test_call(partial(recurse, v - 1))
-            return greenlet.getcurrent().parent.switch()
-
-        gr = RawGreenlet(recurse)
-        gr.switch(5)
-        frame = gr.gr_frame
-        for i in range(5):
-            self.assertEqual(frame.f_locals["v"], i)
-            frame = frame.f_back
-        self.assertEqual(frame.f_locals["v"], 5)
-        self.assertIsNone(frame.f_back)
-        self.assertEqual(gr.switch(10), 1200) # 1200 = 5! * 10
-
-    def test_frames_always_exposed(self):
-        # On Python 3.12 this will crash if we don't set the
-        # gr_frames_always_exposed attribute.
More background: - # https://github.com/python-greenlet/greenlet/issues/388 - main = greenlet.getcurrent() - - def outer(): - inner(sys._getframe(0)) - - def inner(frame): - main.switch(frame) - - gr = RawGreenlet(outer) - frame = gr.switch() - - # Do something else to clobber the part of the C stack used by `gr`, - # so we can't skate by on "it just happened to still be there" - unrelated = RawGreenlet(lambda: None) - unrelated.switch() - - self.assertEqual(frame.f_code.co_name, "outer") - # The next line crashes on 3.12 if we haven't exposed the frames. - self.assertIsNone(frame.f_back) - - -class TestGreenletSetParentErrors(TestCase): - def test_threaded_reparent(self): - data = {} - created_event = threading.Event() - done_event = threading.Event() - - def run(): - data['g'] = RawGreenlet(lambda: None) - created_event.set() - done_event.wait(10) - - def blank(): - greenlet.getcurrent().parent.switch() - - thread = threading.Thread(target=run) - thread.start() - created_event.wait(10) - g = RawGreenlet(blank) - g.switch() - with self.assertRaises(ValueError) as exc: - g.parent = data['g'] - done_event.set() - thread.join(10) - - self.assertEqual(str(exc.exception), "parent cannot be on a different thread") - - def test_unexpected_reparenting(self): - another = [] - def worker(): - g = RawGreenlet(lambda: None) - another.append(g) - g.switch() - t = threading.Thread(target=worker) - t.start() - t.join(10) - # The first time we switch (running g_initialstub(), which is - # when we look up the run attribute) we attempt to change the - # parent to one from another thread (which also happens to be - # dead). ``g_initialstub()`` should detect this and raise a - # greenlet error. - # - # EXCEPT: With the fix for #252, this is actually detected - # sooner, when setting the parent itself. Prior to that fix, - # the main greenlet from the background thread kept a valid - # value for ``run_info``, and appeared to be a valid parent - # until we actually started the greenlet. But now that it's - # cleared, this test is catching whether ``green_setparent`` - # can detect the dead thread. - # - # Further refactoring once again changes this back to a greenlet.error - # - # We need to wait for the cleanup to happen, but we're - # deliberately leaking a main greenlet here. - self.wait_for_pending_cleanups(initial_main_greenlets=self.main_greenlets_before_test + 1) - - class convoluted(RawGreenlet): - def __getattribute__(self, name): - if name == 'run': - self.parent = another[0] # pylint:disable=attribute-defined-outside-init - return RawGreenlet.__getattribute__(self, name) - g = convoluted(lambda: None) - with self.assertRaises(greenlet.error) as exc: - g.switch() - self.assertEqual(str(exc.exception), - "cannot switch to a different thread (which happens to have exited)") - del another[:] - - def test_unexpected_reparenting_thread_running(self): - # Like ``test_unexpected_reparenting``, except the background thread is - # actually still alive. 
- another = [] - switched_to_greenlet = threading.Event() - keep_main_alive = threading.Event() - def worker(): - g = RawGreenlet(lambda: None) - another.append(g) - g.switch() - switched_to_greenlet.set() - keep_main_alive.wait(10) - class convoluted(RawGreenlet): - def __getattribute__(self, name): - if name == 'run': - self.parent = another[0] # pylint:disable=attribute-defined-outside-init - return RawGreenlet.__getattribute__(self, name) - - t = threading.Thread(target=worker) - t.start() - - switched_to_greenlet.wait(10) - try: - g = convoluted(lambda: None) - - with self.assertRaises(greenlet.error) as exc: - g.switch() - self.assertIn("Cannot switch to a different thread", str(exc.exception)) - self.assertIn("Expected", str(exc.exception)) - self.assertIn("Current", str(exc.exception)) - finally: - keep_main_alive.set() - t.join(10) - # XXX: Should handle this automatically. - del another[:] - - def test_cannot_delete_parent(self): - worker = RawGreenlet(lambda: None) - self.assertIs(worker.parent, greenlet.getcurrent()) - - with self.assertRaises(AttributeError) as exc: - del worker.parent - self.assertEqual(str(exc.exception), "can't delete attribute") - - def test_cannot_delete_parent_of_main(self): - with self.assertRaises(AttributeError) as exc: - del greenlet.getcurrent().parent - self.assertEqual(str(exc.exception), "can't delete attribute") - - - def test_main_greenlet_parent_is_none(self): - # assuming we're in a main greenlet here. - self.assertIsNone(greenlet.getcurrent().parent) - - def test_set_parent_wrong_types(self): - def bg(): - # Go back to main. - greenlet.getcurrent().parent.switch() - - def check(glet): - for p in None, 1, self, "42": - with self.assertRaises(TypeError) as exc: - glet.parent = p - - self.assertEqual( - str(exc.exception), - "GreenletChecker: Expected any type of greenlet, not " + type(p).__name__) - - # First, not running - g = RawGreenlet(bg) - self.assertFalse(g) - check(g) - - # Then when running. - g.switch() - self.assertTrue(g) - check(g) - - # Let it finish - g.switch() - - - def test_trivial_cycle(self): - glet = RawGreenlet(lambda: None) - with self.assertRaises(ValueError) as exc: - glet.parent = glet - self.assertEqual(str(exc.exception), "cyclic parent chain") - - def test_trivial_cycle_main(self): - # This used to produce a ValueError, but we catch it earlier than that now. 
- with self.assertRaises(AttributeError) as exc: - greenlet.getcurrent().parent = greenlet.getcurrent() - self.assertEqual(str(exc.exception), "cannot set the parent of a main greenlet") - - def test_deeper_cycle(self): - g1 = RawGreenlet(lambda: None) - g2 = RawGreenlet(lambda: None) - g3 = RawGreenlet(lambda: None) - - g1.parent = g2 - g2.parent = g3 - with self.assertRaises(ValueError) as exc: - g3.parent = g1 - self.assertEqual(str(exc.exception), "cyclic parent chain") - - -class TestRepr(TestCase): - - def assertEndsWith(self, got, suffix): - self.assertTrue(got.endswith(suffix), (got, suffix)) - - def test_main_while_running(self): - r = repr(greenlet.getcurrent()) - self.assertEndsWith(r, " current active started main>") - - def test_main_in_background(self): - main = greenlet.getcurrent() - def run(): - return repr(main) - - g = RawGreenlet(run) - r = g.switch() - self.assertEndsWith(r, ' suspended active started main>') - - def test_initial(self): - r = repr(RawGreenlet()) - self.assertEndsWith(r, ' pending>') - - def test_main_from_other_thread(self): - main = greenlet.getcurrent() - - class T(threading.Thread): - original_main = thread_main = None - main_glet = None - def run(self): - self.original_main = repr(main) - self.main_glet = greenlet.getcurrent() - self.thread_main = repr(self.main_glet) - - t = T() - t.start() - t.join(10) - - self.assertEndsWith(t.original_main, ' suspended active started main>') - self.assertEndsWith(t.thread_main, ' current active started main>') - # give the machinery time to notice the death of the thread, - # and clean it up. Note that we don't use - # ``expect_greenlet_leak`` or wait_for_pending_cleanups, - # because at this point we know we have an extra greenlet - # still reachable. - for _ in range(3): - time.sleep(0.001) - - # In the past, main greenlets, even from dead threads, never - # really appear dead. We have fixed that, and we also report - # that the thread is dead in the repr. (Do this multiple times - # to make sure that we don't self-modify and forget our state - # in the C++ code). - for _ in range(3): - self.assertTrue(t.main_glet.dead) - r = repr(t.main_glet) - self.assertEndsWith(r, ' (thread exited) dead>') - - def test_dead(self): - g = RawGreenlet(lambda: None) - g.switch() - self.assertEndsWith(repr(g), ' dead>') - self.assertNotIn('suspended', repr(g)) - self.assertNotIn('started', repr(g)) - self.assertNotIn('active', repr(g)) - - def test_formatting_produces_native_str(self): - # https://github.com/python-greenlet/greenlet/issues/218 - # %s formatting on Python 2 was producing unicode, not str. - - g_dead = RawGreenlet(lambda: None) - g_not_started = RawGreenlet(lambda: None) - g_cur = greenlet.getcurrent() - - for g in g_dead, g_not_started, g_cur: - - self.assertIsInstance( - '%s' % (g,), - str - ) - self.assertIsInstance( - '%r' % (g,), - str, - ) - - -class TestMainGreenlet(TestCase): - # Tests some implementation details, and relies on some - # implementation details. 
- - def _check_current_is_main(self): - # implementation detail - assert 'main' in repr(greenlet.getcurrent()) - - t = type(greenlet.getcurrent()) - assert 'main' not in repr(t) - return t - - def test_main_greenlet_type_can_be_subclassed(self): - main_type = self._check_current_is_main() - subclass = type('subclass', (main_type,), {}) - self.assertIsNotNone(subclass) - - def test_main_greenlet_is_greenlet(self): - self._check_current_is_main() - self.assertIsInstance(greenlet.getcurrent(), RawGreenlet) - - - -class TestBrokenGreenlets(TestCase): - # Tests for things that used to, or still do, terminate the interpreter. - # This often means doing unsavory things. - - def test_failed_to_initialstub(self): - def func(): - raise AssertionError("Never get here") - - - g = greenlet._greenlet.UnswitchableGreenlet(func) - g.force_switch_error = True - - with self.assertRaisesRegex(SystemError, - "Failed to switch stacks into a greenlet for the first time."): - g.switch() - - def test_failed_to_switch_into_running(self): - runs = [] - def func(): - runs.append(1) - greenlet.getcurrent().parent.switch() - runs.append(2) - greenlet.getcurrent().parent.switch() - runs.append(3) # pragma: no cover - - g = greenlet._greenlet.UnswitchableGreenlet(func) - g.switch() - self.assertEqual(runs, [1]) - g.switch() - self.assertEqual(runs, [1, 2]) - g.force_switch_error = True - - with self.assertRaisesRegex(SystemError, - "Failed to switch stacks into a running greenlet."): - g.switch() - - # If we stopped here, we would fail the leakcheck, because we've left - # the ``inner_bootstrap()`` C frame and its descendents hanging around, - # which have a bunch of Python references. They'll never get cleaned up - # if we don't let the greenlet finish. - g.force_switch_error = False - g.switch() - self.assertEqual(runs, [1, 2, 3]) - - def test_failed_to_slp_switch_into_running(self): - ex = self.assertScriptRaises('fail_slp_switch.py') - - self.assertIn('fail_slp_switch is running', ex.output) - self.assertIn(ex.returncode, self.get_expected_returncodes_for_aborted_process()) - - def test_reentrant_switch_two_greenlets(self): - # Before we started capturing the arguments in g_switch_finish, this could crash. - output = self.run_script('fail_switch_two_greenlets.py') - self.assertIn('In g1_run', output) - self.assertIn('TRACE', output) - self.assertIn('LEAVE TRACE', output) - self.assertIn('Falling off end of main', output) - self.assertIn('Falling off end of g1_run', output) - self.assertIn('Falling off end of g2', output) - - def test_reentrant_switch_three_greenlets(self): - # On debug builds of greenlet, this used to crash with an assertion error; - # on non-debug versions, it ran fine (which it should not do!). - # Now it always crashes correctly with a TypeError - ex = self.assertScriptRaises('fail_switch_three_greenlets.py', exitcodes=(1,)) - - self.assertIn('TypeError', ex.output) - self.assertIn('positional arguments', ex.output) - - def test_reentrant_switch_three_greenlets2(self): - # This actually passed on debug and non-debug builds. It - # should probably have been triggering some debug assertions - # but it didn't. - # - # I think the fixes for the above test also kicked in here. 
-        output = self.run_script('fail_switch_three_greenlets2.py')
-        self.assertIn(
-            "RESULTS: [('trace', 'switch'), "
-            "('trace', 'switch'), ('g2 arg', 'g2 from tracefunc'), "
-            "('trace', 'switch'), ('main g1', 'from g2_run'), ('trace', 'switch'), "
-            "('g1 arg', 'g1 from main'), ('trace', 'switch'), ('main g2', 'from g1_run'), "
-            "('trace', 'switch'), ('g1 from parent', 'g1 from main 2'), ('trace', 'switch'), "
-            "('main g1.2', 'g1 done'), ('trace', 'switch'), ('g2 from parent', ()), "
-            "('trace', 'switch'), ('main g2.2', 'g2 done')]",
-            output
-        )
-
-    def test_reentrant_switch_GreenletAlreadyStartedInPython(self):
-        output = self.run_script('fail_initialstub_already_started.py')
-
-        self.assertIn(
-            "RESULTS: ['Begin C', 'Switch to b from B.__getattribute__ in C', "
-            "('Begin B', ()), '_B_run switching to main', ('main from c', 'From B'), "
-            "'B.__getattribute__ back from main in C', ('Begin A', (None,)), "
-            "('A dead?', True, 'B dead?', True, 'C dead?', False), "
-            "'C done', ('main from c.2', None)]",
-            output
-        )
-
-    def test_reentrant_switch_run_callable_has_del(self):
-        output = self.run_script('fail_clearing_run_switches.py')
-        self.assertIn(
-            "RESULTS ["
-            "('G.__getattribute__', 'run'), ('RunCallable', '__del__'), "
-            "('main: g.switch()', 'from RunCallable'), ('run_func', 'enter')"
-            "]",
-            output
-        )
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/myenv/Lib/site-packages/greenlet/tests/test_greenlet_trash.py b/myenv/Lib/site-packages/greenlet/tests/test_greenlet_trash.py
deleted file mode 100644
index c1fc137..0000000
--- a/myenv/Lib/site-packages/greenlet/tests/test_greenlet_trash.py
+++ /dev/null
@@ -1,187 +0,0 @@
-# -*- coding: utf-8 -*-
-"""
-Tests for greenlets interacting with the CPython trash can API.
-
-The CPython trash can API is not designed to be re-entered from a
-single thread. But this can happen using greenlets, if something
-during the object deallocation process switches greenlets, and this second
-greenlet then causes the trash can to get entered again. Here, we do this
-very explicitly, but in other cases (like gevent) it could be arbitrarily more
-complicated: for example, a weakref callback might try to acquire a lock that's
-already held by another greenlet; that would allow a greenlet switch to occur.
-
-See https://github.com/gevent/gevent/issues/1909
-
-This test is fragile and relies on details of the CPython
-implementation (like most of the rest of this package):
-
-    - We enter the trashcan and deferred deallocation after
-      ``_PyTrash_UNWIND_LEVEL`` calls. This constant, defined in
-      CPython's object.c, is generally 50. That's basically how many objects are required to
-      get us into the deferred deallocation situation.
-
-    - The test fails by hitting an ``assert()`` in object.c; if the
-      build didn't enable assert, then we don't catch this.
-
-    - If the test fails in that way, the interpreter crashes.
-"""
-from __future__ import print_function, absolute_import, division
-
-import unittest
-
-
-class TestTrashCanReEnter(unittest.TestCase):
-
-    def test_it(self):
-        try:
-            # pylint:disable-next=no-name-in-module
-            from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=unused-import
-        except ImportError:
-            import sys
-            # Python 3.13 no longer has "trash delete nesting" (it has "delete later" instead)
-            assert sys.version_info[:2] >= (3, 13)
-            self.skipTest("get_tstate_trash_delete_nesting is not available.")
-
-        # Try several times to trigger it, because it isn't 100%
-        # reliable.
-        for _ in range(10):
-            self.check_it()
-
-    def check_it(self): # pylint:disable=too-many-statements
-        import greenlet
-        from greenlet._greenlet import get_tstate_trash_delete_nesting # pylint:disable=no-name-in-module
-        main = greenlet.getcurrent()
-
-        assert get_tstate_trash_delete_nesting() == 0
-
-        # We expect to be in deferred deallocation after this many
-        # deallocations have occurred. TODO: I wish we had a better way to do
-        # this --- that was before get_tstate_trash_delete_nesting; perhaps
-        # we can use that API to do better?
-        TRASH_UNWIND_LEVEL = 50
-        # How many objects to put in a container; it's the container that
-        # queues objects for deferred deallocation.
-        OBJECTS_PER_CONTAINER = 500
-
-        class Dealloc: # define the class here because we alter class variables each time we run.
-            """
-            An object with a ``__del__`` method. When it starts getting deallocated
-            from a deferred trash can run, it switches greenlets, allocates more objects
-            which then also go in the trash can. If we don't save state appropriately,
-            nesting gets out of order and we can crash the interpreter.
-            """
-
-            #: Has our deallocation actually run and switched greenlets?
-            #: When it does, this will be set to the current greenlet. This should
-            #: be happening in the main greenlet, so we check that down below.
-            SPAWNED = False
-
-            #: Has the background greenlet run?
-            BG_RAN = False
-
-            BG_GLET = None
-
-            #: How many of these things have ever been allocated.
-            CREATED = 0
-
-            #: How many of these things have ever been deallocated.
-            DESTROYED = 0
-
-            #: How many were destroyed not in the main greenlet. There should always
-            #: be some.
-            #: If the test is broken or things change in the trashcan implementation,
-            #: this may not be correct.
-            DESTROYED_BG = 0
-
-            def __init__(self, sequence_number):
-                """
-                :param sequence_number: The ordinal of this object during
-                   one particular creation run. This is used to detect (guess, really)
-                   when we have entered the trash can's deferred deallocation.
-                """
-                self.i = sequence_number
-                Dealloc.CREATED += 1
-
-            def __del__(self):
-                if self.i == TRASH_UNWIND_LEVEL and not self.SPAWNED:
-                    Dealloc.SPAWNED = greenlet.getcurrent()
-                    other = Dealloc.BG_GLET = greenlet.greenlet(background_greenlet)
-                    x = other.switch()
-                    assert x == 42
-                    # It's important that we don't switch back to the greenlet,
-                    # we leave it hanging there in an incomplete state. But we don't let it
-                    # get collected, either. If we complete it now, while we're still
-                    # in the scope of the initial trash can, things work out and we
-                    # don't see the problem. We need this greenlet to complete
-                    # at some point in the future, after we've exited this trash can invocation.
-                    del other
-                elif self.i == 40 and greenlet.getcurrent() is not main:
-                    Dealloc.BG_RAN = True
-                    try:
-                        main.switch(42)
-                    except greenlet.GreenletExit as ex:
-                        # We expect this; all references to us go away
-                        # while we're still running, and we need to finish deleting
-                        # ourself.
-                        Dealloc.BG_RAN = type(ex)
-                        del ex
-
-                # Record the fact that we're dead last of all. This ensures that
-                # we actually get returned too.
-                Dealloc.DESTROYED += 1
-                if greenlet.getcurrent() is not main:
-                    Dealloc.DESTROYED_BG += 1
-
-
-        def background_greenlet():
-            # We direct through a second function, instead of
-            # directly calling ``make_some()``, so that we have complete
-            # control over when these objects are destroyed: we need them
-            # to be destroyed in the context of the background greenlet
-            t = make_some()
-            del t # Trigger deletion.
- - def make_some(): - t = () - i = OBJECTS_PER_CONTAINER - while i: - # Nest the tuples; it's the recursion that gets us - # into trash. - t = (Dealloc(i), t) - i -= 1 - return t - - - some = make_some() - self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER) - self.assertEqual(Dealloc.DESTROYED, 0) - - # If we're going to crash, it should be on the following line. - # We only crash if ``assert()`` is enabled, of course. - del some - - # For non-debug builds of CPython, we won't crash. The best we can do is check - # the nesting level explicitly. - self.assertEqual(0, get_tstate_trash_delete_nesting()) - - # Discard this, raising GreenletExit into where it is waiting. - Dealloc.BG_GLET = None - # The same nesting level maintains. - self.assertEqual(0, get_tstate_trash_delete_nesting()) - - # We definitely cleaned some up in the background - self.assertGreater(Dealloc.DESTROYED_BG, 0) - - # Make sure all the cleanups happened. - self.assertIs(Dealloc.SPAWNED, main) - self.assertTrue(Dealloc.BG_RAN) - self.assertEqual(Dealloc.BG_RAN, greenlet.GreenletExit) - self.assertEqual(Dealloc.CREATED, Dealloc.DESTROYED ) - self.assertEqual(Dealloc.CREATED, OBJECTS_PER_CONTAINER * 2) - - import gc - gc.collect() - - -if __name__ == '__main__': - unittest.main() diff --git a/myenv/Lib/site-packages/greenlet/tests/test_leaks.py b/myenv/Lib/site-packages/greenlet/tests/test_leaks.py deleted file mode 100644 index ed1fa71..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_leaks.py +++ /dev/null @@ -1,443 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Testing scenarios that may have leaked. -""" -from __future__ import print_function, absolute_import, division - -import sys -import gc - -import time -import weakref -import threading - - -import greenlet -from . import TestCase -from .leakcheck import fails_leakcheck -from .leakcheck import ignores_leakcheck -from .leakcheck import RUNNING_ON_MANYLINUX - -# pylint:disable=protected-access - -assert greenlet.GREENLET_USE_GC # Option to disable this was removed in 1.0 - -class HasFinalizerTracksInstances(object): - EXTANT_INSTANCES = set() - def __init__(self, msg): - self.msg = sys.intern(msg) - self.EXTANT_INSTANCES.add(id(self)) - def __del__(self): - self.EXTANT_INSTANCES.remove(id(self)) - def __repr__(self): - return "" % ( - id(self), self.msg - ) - @classmethod - def reset(cls): - cls.EXTANT_INSTANCES.clear() - - -class TestLeaks(TestCase): - - def test_arg_refs(self): - args = ('a', 'b', 'c') - refcount_before = sys.getrefcount(args) - # pylint:disable=unnecessary-lambda - g = greenlet.greenlet( - lambda *args: greenlet.getcurrent().parent.switch(*args)) - for _ in range(100): - g.switch(*args) - self.assertEqual(sys.getrefcount(args), refcount_before) - - def test_kwarg_refs(self): - kwargs = {} - # pylint:disable=unnecessary-lambda - g = greenlet.greenlet( - lambda **kwargs: greenlet.getcurrent().parent.switch(**kwargs)) - for _ in range(100): - g.switch(**kwargs) - self.assertEqual(sys.getrefcount(kwargs), 2) - - - @staticmethod - def __recycle_threads(): - # By introducing a thread that does sleep we allow other threads, - # that have triggered their __block condition, but did not have a - # chance to deallocate their thread state yet, to finally do so. - # The way it works is by requiring a GIL switch (different thread), - # which does a GIL release (sleep), which might do a GIL switch - # to finished threads and allow them to clean up. 
- def worker(): - time.sleep(0.001) - t = threading.Thread(target=worker) - t.start() - time.sleep(0.001) - t.join(10) - - def test_threaded_leak(self): - gg = [] - def worker(): - # only main greenlet present - gg.append(weakref.ref(greenlet.getcurrent())) - for _ in range(2): - t = threading.Thread(target=worker) - t.start() - t.join(10) - del t - greenlet.getcurrent() # update ts_current - self.__recycle_threads() - greenlet.getcurrent() # update ts_current - gc.collect() - greenlet.getcurrent() # update ts_current - for g in gg: - self.assertIsNone(g()) - - def test_threaded_adv_leak(self): - gg = [] - def worker(): - # main and additional *finished* greenlets - ll = greenlet.getcurrent().ll = [] - def additional(): - ll.append(greenlet.getcurrent()) - for _ in range(2): - greenlet.greenlet(additional).switch() - gg.append(weakref.ref(greenlet.getcurrent())) - for _ in range(2): - t = threading.Thread(target=worker) - t.start() - t.join(10) - del t - greenlet.getcurrent() # update ts_current - self.__recycle_threads() - greenlet.getcurrent() # update ts_current - gc.collect() - greenlet.getcurrent() # update ts_current - for g in gg: - self.assertIsNone(g()) - - def assertClocksUsed(self): - used = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() - self.assertGreaterEqual(used, 0) - # we don't lose the value - greenlet._greenlet.enable_optional_cleanup(True) - used2 = greenlet._greenlet.get_clocks_used_doing_optional_cleanup() - self.assertEqual(used, used2) - self.assertGreater(greenlet._greenlet.CLOCKS_PER_SEC, 1) - - def _check_issue251(self, - manually_collect_background=True, - explicit_reference_to_switch=False): - # See https://github.com/python-greenlet/greenlet/issues/251 - # Killing a greenlet (probably not the main one) - # in one thread from another thread would - # result in leaking a list (the ts_delkey list). - # We no longer use lists to hold that stuff, though. - - # For the test to be valid, even empty lists have to be tracked by the - # GC - - assert gc.is_tracked([]) - HasFinalizerTracksInstances.reset() - greenlet.getcurrent() - greenlets_before = self.count_objects(greenlet.greenlet, exact_kind=False) - - background_glet_running = threading.Event() - background_glet_killed = threading.Event() - background_greenlets = [] - - # XXX: Switching this to a greenlet subclass that overrides - # run results in all callers failing the leaktest; that - # greenlet instance is leaked. There's a bound method for - # run() living on the stack of the greenlet in g_initialstub, - # and since we don't manually switch back to the background - # greenlet to let it "fall off the end" and exit the - # g_initialstub function, it never gets cleaned up. Making the - # garbage collector aware of this bound method (making it an - # attribute of the greenlet structure and traversing into it) - # doesn't help, for some reason. - def background_greenlet(): - # Throw control back to the main greenlet. 
- jd = HasFinalizerTracksInstances("DELETING STACK OBJECT") - greenlet._greenlet.set_thread_local( - 'test_leaks_key', - HasFinalizerTracksInstances("DELETING THREAD STATE")) - # Explicitly keeping 'switch' in a local variable - # breaks this test in all versions - if explicit_reference_to_switch: - s = greenlet.getcurrent().parent.switch - s([jd]) - else: - greenlet.getcurrent().parent.switch([jd]) - - bg_main_wrefs = [] - - def background_thread(): - glet = greenlet.greenlet(background_greenlet) - bg_main_wrefs.append(weakref.ref(glet.parent)) - - background_greenlets.append(glet) - glet.switch() # Be sure it's active. - # Control is ours again. - del glet # Delete one reference from the thread it runs in. - background_glet_running.set() - background_glet_killed.wait(10) - - # To trigger the background collection of the dead - # greenlet, thus clearing out the contents of the list, we - # need to run some APIs. See issue 252. - if manually_collect_background: - greenlet.getcurrent() - - - t = threading.Thread(target=background_thread) - t.start() - background_glet_running.wait(10) - greenlet.getcurrent() - lists_before = self.count_objects(list, exact_kind=True) - - assert len(background_greenlets) == 1 - self.assertFalse(background_greenlets[0].dead) - # Delete the last reference to the background greenlet - # from a different thread. This puts it in the background thread's - # ts_delkey list. - del background_greenlets[:] - background_glet_killed.set() - - # Now wait for the background thread to die. - t.join(10) - del t - # As part of the fix for 252, we need to cycle the ceval.c - # interpreter loop to be sure it has had a chance to process - # the pending call. - self.wait_for_pending_cleanups() - - lists_after = self.count_objects(list, exact_kind=True) - greenlets_after = self.count_objects(greenlet.greenlet, exact_kind=False) - - # On 2.7, we observe that lists_after is smaller than - # lists_before. No idea what lists got cleaned up. All the - # Python 3 versions match exactly. - self.assertLessEqual(lists_after, lists_before) - # On versions after 3.6, we've successfully cleaned up the - # greenlet references thanks to the internal "vectorcall" - # protocol; prior to that, there is a reference path through - # the ``greenlet.switch`` method still on the stack that we - # can't reach to clean up. The C code goes through terrific - # lengths to clean that up. - if not explicit_reference_to_switch \ - and greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None: - # If cleanup was disabled, though, we may not find it. - self.assertEqual(greenlets_after, greenlets_before) - if manually_collect_background: - # TODO: Figure out how to make this work! - # The one on the stack is still leaking somehow - # in the non-manually-collect state. - self.assertEqual(HasFinalizerTracksInstances.EXTANT_INSTANCES, set()) - else: - # The explicit reference prevents us from collecting it - # and it isn't always found by the GC either for some - # reason. The entire frame is leaked somehow, on some - # platforms (e.g., MacPorts builds of Python (all - # versions!)), but not on other platforms (the linux and - # windows builds on GitHub actions and Appveyor). So we'd - # like to write a test that proves that the main greenlet - # sticks around, and we can on my machine (macOS 11.6, - # MacPorts builds of everything) but we can't write that - # same test on other platforms. However, hopefully iteration - # done by leakcheck will find it. 
-            pass
-
-        if greenlet._greenlet.get_clocks_used_doing_optional_cleanup() is not None:
-            self.assertClocksUsed()
-
-    def test_issue251_killing_cross_thread_leaks_list(self):
-        self._check_issue251()
-
-    def test_issue251_with_cleanup_disabled(self):
-        greenlet._greenlet.enable_optional_cleanup(False)
-        try:
-            self._check_issue251()
-        finally:
-            greenlet._greenlet.enable_optional_cleanup(True)
-
-    @fails_leakcheck
-    def test_issue251_issue252_need_to_collect_in_background(self):
-        # Between greenlet 1.1.2 and the next version, this was still
-        # failing because the leak of the list still exists when we
-        # don't call a greenlet API before exiting the thread. The
-        # proximate cause is that neither of the two greenlets from
-        # the background thread are actually being destroyed, even
-        # though the GC is in fact visiting both objects. It's not
-        # clear where that leak is? For some reason the thread-local
-        # dict holding it isn't being cleaned up.
-        #
-        # The leak, I think, is in the CPython internal function that
-        # calls into green_switch(). The argument tuple is still on
-        # the C stack somewhere and can't be reached? That doesn't
-        # make sense, because the tuple should be collectable when
-        # this object goes away.
-        #
-        # Note that this test sometimes spuriously passes on Linux,
-        # for some reason, but I've never seen it pass on macOS.
-        self._check_issue251(manually_collect_background=False)
-
-    @fails_leakcheck
-    def test_issue251_issue252_need_to_collect_in_background_cleanup_disabled(self):
-        self.expect_greenlet_leak = True
-        greenlet._greenlet.enable_optional_cleanup(False)
-        try:
-            self._check_issue251(manually_collect_background=False)
-        finally:
-            greenlet._greenlet.enable_optional_cleanup(True)
-
-    @fails_leakcheck
-    def test_issue251_issue252_explicit_reference_not_collectable(self):
-        self._check_issue251(
-            manually_collect_background=False,
-            explicit_reference_to_switch=True)
-
-    UNTRACK_ATTEMPTS = 100
-
-    def _only_test_some_versions(self):
-        # We're only looking for this problem specifically on 3.11,
-        # and this set of tests is relatively fragile, depending on
-        # OS and memory management details. So we want to run it on 3.11+
-        # (obviously) but not every older 3.x version in order to reduce
-        # false negatives. At the moment, those false results seem to have
-        # resolved, so we are actually running this on 3.8+
-        assert sys.version_info[0] >= 3
-        if sys.version_info[:2] < (3, 8):
-            self.skipTest('Only observed on 3.11')
-        if RUNNING_ON_MANYLINUX:
-            self.skipTest("Slow and not worth repeating here")
-
-    @ignores_leakcheck
-    # Because we're just trying to track raw memory, not objects, and running
-    # the leakcheck makes an already slow test slower.
-    def test_untracked_memory_doesnt_increase(self):
-        # See https://github.com/gevent/gevent/issues/1924
-        # and https://github.com/python-greenlet/greenlet/issues/328
-        self._only_test_some_versions()
-        def f():
-            return 1
-
-        ITER = 10000
-        def run_it():
-            for _ in range(ITER):
-                greenlet.greenlet(f).switch()
-
-        # Establish baseline
-        for _ in range(3):
-            run_it()
-
-        # uss: (Linux, macOS, Windows): aka "Unique Set Size", this is
-        # the memory which is unique to a process and which would be
-        # freed if the process was terminated right now.
- uss_before = self.get_process_uss() - - for count in range(self.UNTRACK_ATTEMPTS): - uss_before = max(uss_before, self.get_process_uss()) - run_it() - - uss_after = self.get_process_uss() - if uss_after <= uss_before and count > 1: - break - - self.assertLessEqual(uss_after, uss_before) - - def _check_untracked_memory_thread(self, deallocate_in_thread=True): - self._only_test_some_versions() - # Like the above test, but what if there are a bunch of - # unfinished greenlets in a thread that dies? - # Does it matter if we deallocate in the thread or not? - EXIT_COUNT = [0] - - def f(): - try: - greenlet.getcurrent().parent.switch() - except greenlet.GreenletExit: - EXIT_COUNT[0] += 1 - raise - return 1 - - ITER = 10000 - def run_it(): - glets = [] - for _ in range(ITER): - # Greenlet starts, switches back to us. - # We keep a strong reference to the greenlet though so it doesn't - # get a GreenletExit exception. - g = greenlet.greenlet(f) - glets.append(g) - g.switch() - - return glets - - test = self - - class ThreadFunc: - uss_before = uss_after = 0 - glets = () - ITER = 2 - def __call__(self): - self.uss_before = test.get_process_uss() - - for _ in range(self.ITER): - self.glets += tuple(run_it()) - - for g in self.glets: - test.assertIn('suspended active', str(g)) - # Drop them. - if deallocate_in_thread: - self.glets = () - self.uss_after = test.get_process_uss() - - # Establish baseline - uss_before = uss_after = None - for count in range(self.UNTRACK_ATTEMPTS): - EXIT_COUNT[0] = 0 - thread_func = ThreadFunc() - t = threading.Thread(target=thread_func) - t.start() - t.join(30) - self.assertFalse(t.is_alive()) - - if uss_before is None: - uss_before = thread_func.uss_before - - uss_before = max(uss_before, thread_func.uss_before) - if deallocate_in_thread: - self.assertEqual(thread_func.glets, ()) - self.assertEqual(EXIT_COUNT[0], ITER * thread_func.ITER) - - del thread_func # Deallocate the greenlets; but this won't raise into them - del t - if not deallocate_in_thread: - self.assertEqual(EXIT_COUNT[0], 0) - if deallocate_in_thread: - self.wait_for_pending_cleanups() - - uss_after = self.get_process_uss() - # See if we achieve a non-growth state at some point. Break when we do. - if uss_after <= uss_before and count > 1: - break - - self.wait_for_pending_cleanups() - uss_after = self.get_process_uss() - self.assertLessEqual(uss_after, uss_before, "after attempts %d" % (count,)) - - @ignores_leakcheck - # Because we're just trying to track raw memory, not objects, and running - # the leakcheck makes an already slow test slower. - def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_thread(self): - self._check_untracked_memory_thread(deallocate_in_thread=True) - - @ignores_leakcheck - # Because the main greenlets from the background threads do not exit in a timely fashion, - # we fail the object-based leakchecks. - def test_untracked_memory_doesnt_increase_unfinished_thread_dealloc_in_main(self): - self._check_untracked_memory_thread(deallocate_in_thread=False) - -if __name__ == '__main__': - __import__('unittest').main() diff --git a/myenv/Lib/site-packages/greenlet/tests/test_stack_saved.py b/myenv/Lib/site-packages/greenlet/tests/test_stack_saved.py deleted file mode 100644 index b362bf9..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_stack_saved.py +++ /dev/null @@ -1,19 +0,0 @@ -import greenlet -from . 
import TestCase - - -class Test(TestCase): - - def test_stack_saved(self): - main = greenlet.getcurrent() - self.assertEqual(main._stack_saved, 0) - - def func(): - main.switch(main._stack_saved) - - g = greenlet.greenlet(func) - x = g.switch() - self.assertGreater(x, 0) - self.assertGreater(g._stack_saved, 0) - g.switch() - self.assertEqual(g._stack_saved, 0) diff --git a/myenv/Lib/site-packages/greenlet/tests/test_throw.py b/myenv/Lib/site-packages/greenlet/tests/test_throw.py deleted file mode 100644 index f4f9a14..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_throw.py +++ /dev/null @@ -1,128 +0,0 @@ -import sys - - -from greenlet import greenlet -from . import TestCase - -def switch(*args): - return greenlet.getcurrent().parent.switch(*args) - - -class ThrowTests(TestCase): - def test_class(self): - def f(): - try: - switch("ok") - except RuntimeError: - switch("ok") - return - switch("fail") - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw(RuntimeError) - self.assertEqual(res, "ok") - - def test_val(self): - def f(): - try: - switch("ok") - except RuntimeError: - val = sys.exc_info()[1] - if str(val) == "ciao": - switch("ok") - return - switch("fail") - - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw(RuntimeError("ciao")) - self.assertEqual(res, "ok") - - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw(RuntimeError, "ciao") - self.assertEqual(res, "ok") - - def test_kill(self): - def f(): - switch("ok") - switch("fail") - g = greenlet(f) - res = g.switch() - self.assertEqual(res, "ok") - res = g.throw() - self.assertTrue(isinstance(res, greenlet.GreenletExit)) - self.assertTrue(g.dead) - res = g.throw() # immediately eaten by the already-dead greenlet - self.assertTrue(isinstance(res, greenlet.GreenletExit)) - - def test_throw_goes_to_original_parent(self): - main = greenlet.getcurrent() - - def f1(): - try: - main.switch("f1 ready to catch") - except IndexError: - return "caught" - return "normal exit" - - def f2(): - main.switch("from f2") - - g1 = greenlet(f1) - g2 = greenlet(f2, parent=g1) - with self.assertRaises(IndexError): - g2.throw(IndexError) - self.assertTrue(g2.dead) - self.assertTrue(g1.dead) - - g1 = greenlet(f1) - g2 = greenlet(f2, parent=g1) - res = g1.switch() - self.assertEqual(res, "f1 ready to catch") - res = g2.throw(IndexError) - self.assertEqual(res, "caught") - self.assertTrue(g2.dead) - self.assertTrue(g1.dead) - - g1 = greenlet(f1) - g2 = greenlet(f2, parent=g1) - res = g1.switch() - self.assertEqual(res, "f1 ready to catch") - res = g2.switch() - self.assertEqual(res, "from f2") - res = g2.throw(IndexError) - self.assertEqual(res, "caught") - self.assertTrue(g2.dead) - self.assertTrue(g1.dead) - - def test_non_traceback_param(self): - with self.assertRaises(TypeError) as exc: - greenlet.getcurrent().throw( - Exception, - Exception(), - self - ) - self.assertEqual(str(exc.exception), - "throw() third argument must be a traceback object") - - def test_instance_of_wrong_type(self): - with self.assertRaises(TypeError) as exc: - greenlet.getcurrent().throw( - Exception(), - BaseException() - ) - - self.assertEqual(str(exc.exception), - "instance exception may not have a separate value") - - def test_not_throwable(self): - with self.assertRaises(TypeError) as exc: - greenlet.getcurrent().throw( - "abc" - ) - self.assertEqual(str(exc.exception), - "exceptions must be classes, or instances, not str") diff --git 
a/myenv/Lib/site-packages/greenlet/tests/test_tracing.py b/myenv/Lib/site-packages/greenlet/tests/test_tracing.py deleted file mode 100644 index c044d4b..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_tracing.py +++ /dev/null @@ -1,291 +0,0 @@ -from __future__ import print_function -import sys -import greenlet -import unittest - -from . import TestCase -from . import PY312 - -# https://discuss.python.org/t/cpython-3-12-greenlet-and-tracing-profiling-how-to-not-crash-and-get-correct-results/33144/2 -DEBUG_BUILD_PY312 = ( - PY312 and hasattr(sys, 'gettotalrefcount'), - "Broken on debug builds of Python 3.12" -) - -class SomeError(Exception): - pass - -class GreenletTracer(object): - oldtrace = None - - def __init__(self, error_on_trace=False): - self.actions = [] - self.error_on_trace = error_on_trace - - def __call__(self, *args): - self.actions.append(args) - if self.error_on_trace: - raise SomeError - - def __enter__(self): - self.oldtrace = greenlet.settrace(self) - return self.actions - - def __exit__(self, *args): - greenlet.settrace(self.oldtrace) - - -class TestGreenletTracing(TestCase): - """ - Tests of ``greenlet.settrace()`` - """ - - def test_a_greenlet_tracing(self): - main = greenlet.getcurrent() - def dummy(): - pass - def dummyexc(): - raise SomeError() - - with GreenletTracer() as actions: - g1 = greenlet.greenlet(dummy) - g1.switch() - g2 = greenlet.greenlet(dummyexc) - self.assertRaises(SomeError, g2.switch) - - self.assertEqual(actions, [ - ('switch', (main, g1)), - ('switch', (g1, main)), - ('switch', (main, g2)), - ('throw', (g2, main)), - ]) - - def test_b_exception_disables_tracing(self): - main = greenlet.getcurrent() - def dummy(): - main.switch() - g = greenlet.greenlet(dummy) - g.switch() - with GreenletTracer(error_on_trace=True) as actions: - self.assertRaises(SomeError, g.switch) - self.assertEqual(greenlet.gettrace(), None) - - self.assertEqual(actions, [ - ('switch', (main, g)), - ]) - - def test_set_same_tracer_twice(self): - # https://github.com/python-greenlet/greenlet/issues/332 - # Our logic in asserting that the tracefunction should - # gain a reference was incorrect if the same tracefunction was set - # twice. - tracer = GreenletTracer() - with tracer: - greenlet.settrace(tracer) - - -class PythonTracer(object): - oldtrace = None - - def __init__(self): - self.actions = [] - - def __call__(self, frame, event, arg): - # Record the co_name so we have an idea what function we're in. - self.actions.append((event, frame.f_code.co_name)) - - def __enter__(self): - self.oldtrace = sys.setprofile(self) - return self.actions - - def __exit__(self, *args): - sys.setprofile(self.oldtrace) - -def tpt_callback(): - return 42 - -class TestPythonTracing(TestCase): - """ - Tests of the interaction of ``sys.settrace()`` - with greenlet facilities. - - NOTE: Most of this is probably CPython specific. 
- """ - - maxDiff = None - - def test_trace_events_trivial(self): - with PythonTracer() as actions: - tpt_callback() - # If we use the sys.settrace instead of setprofile, we get - # this: - - # self.assertEqual(actions, [ - # ('call', 'tpt_callback'), - # ('call', '__exit__'), - # ]) - - self.assertEqual(actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - def _trace_switch(self, glet): - with PythonTracer() as actions: - glet.switch() - return actions - - def _check_trace_events_func_already_set(self, glet): - actions = self._trace_switch(glet) - self.assertEqual(actions, [ - ('return', '__enter__'), - ('c_call', '_trace_switch'), - ('call', 'run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('return', 'run'), - ('c_return', '_trace_switch'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - def test_trace_events_into_greenlet_func_already_set(self): - def run(): - return tpt_callback() - - self._check_trace_events_func_already_set(greenlet.greenlet(run)) - - def test_trace_events_into_greenlet_subclass_already_set(self): - class X(greenlet.greenlet): - def run(self): - return tpt_callback() - self._check_trace_events_func_already_set(X()) - - def _check_trace_events_from_greenlet_sets_profiler(self, g, tracer): - g.switch() - tpt_callback() - tracer.__exit__() - self.assertEqual(tracer.actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('return', 'run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - - def test_trace_events_from_greenlet_func_sets_profiler(self): - tracer = PythonTracer() - def run(): - tracer.__enter__() - return tpt_callback() - - self._check_trace_events_from_greenlet_sets_profiler(greenlet.greenlet(run), - tracer) - - def test_trace_events_from_greenlet_subclass_sets_profiler(self): - tracer = PythonTracer() - class X(greenlet.greenlet): - def run(self): - tracer.__enter__() - return tpt_callback() - - self._check_trace_events_from_greenlet_sets_profiler(X(), tracer) - - @unittest.skipIf(*DEBUG_BUILD_PY312) - def test_trace_events_multiple_greenlets_switching(self): - tracer = PythonTracer() - - g1 = None - g2 = None - - def g1_run(): - tracer.__enter__() - tpt_callback() - g2.switch() - tpt_callback() - return 42 - - def g2_run(): - tpt_callback() - tracer.__exit__() - tpt_callback() - g1.switch() - - g1 = greenlet.greenlet(g1_run) - g2 = greenlet.greenlet(g2_run) - - x = g1.switch() - self.assertEqual(x, 42) - tpt_callback() # ensure not in the trace - self.assertEqual(tracer.actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('c_call', 'g1_run'), - ('call', 'g2_run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - @unittest.skipIf(*DEBUG_BUILD_PY312) - def test_trace_events_multiple_greenlets_switching_siblings(self): - # Like the first version, but get both greenlets running first - # as "siblings" and then establish the tracing. 
- tracer = PythonTracer() - - g1 = None - g2 = None - - def g1_run(): - greenlet.getcurrent().parent.switch() - tracer.__enter__() - tpt_callback() - g2.switch() - tpt_callback() - return 42 - - def g2_run(): - greenlet.getcurrent().parent.switch() - - tpt_callback() - tracer.__exit__() - tpt_callback() - g1.switch() - - g1 = greenlet.greenlet(g1_run) - g2 = greenlet.greenlet(g2_run) - - # Start g1 - g1.switch() - # And it immediately returns control to us. - # Start g2 - g2.switch() - # Which also returns. Now kick off the real part of the - # test. - x = g1.switch() - self.assertEqual(x, 42) - - tpt_callback() # ensure not in the trace - self.assertEqual(tracer.actions, [ - ('return', '__enter__'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('c_call', 'g1_run'), - ('call', 'tpt_callback'), - ('return', 'tpt_callback'), - ('call', '__exit__'), - ('c_call', '__exit__'), - ]) - - -if __name__ == '__main__': - unittest.main() diff --git a/myenv/Lib/site-packages/greenlet/tests/test_version.py b/myenv/Lib/site-packages/greenlet/tests/test_version.py deleted file mode 100644 index 96c17cf..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_version.py +++ /dev/null @@ -1,41 +0,0 @@ -#! /usr/bin/env python -from __future__ import absolute_import -from __future__ import print_function - -import sys -import os -from unittest import TestCase as NonLeakingTestCase - -import greenlet - -# No reason to run this multiple times under leakchecks, -# it doesn't do anything. -class VersionTests(NonLeakingTestCase): - def test_version(self): - def find_dominating_file(name): - if os.path.exists(name): - return name - - tried = [] - here = os.path.abspath(os.path.dirname(__file__)) - for i in range(10): - up = ['..'] * i - path = [here] + up + [name] - fname = os.path.join(*path) - fname = os.path.abspath(fname) - tried.append(fname) - if os.path.exists(fname): - return fname - raise AssertionError("Could not find file " + name + "; checked " + str(tried)) - - try: - setup_py = find_dominating_file('setup.py') - except AssertionError as e: - self.skipTest("Unable to find setup.py; must be out of tree. " + str(e)) - - - invoke_setup = "%s %s --version" % (sys.executable, setup_py) - with os.popen(invoke_setup) as f: - sversion = f.read().strip() - - self.assertEqual(sversion, greenlet.__version__) diff --git a/myenv/Lib/site-packages/greenlet/tests/test_weakref.py b/myenv/Lib/site-packages/greenlet/tests/test_weakref.py deleted file mode 100644 index 05a38a7..0000000 --- a/myenv/Lib/site-packages/greenlet/tests/test_weakref.py +++ /dev/null @@ -1,35 +0,0 @@ -import gc -import weakref - - -import greenlet -from .
import TestCase - -class WeakRefTests(TestCase): - def test_dead_weakref(self): - def _dead_greenlet(): - g = greenlet.greenlet(lambda: None) - g.switch() - return g - o = weakref.ref(_dead_greenlet()) - gc.collect() - self.assertEqual(o(), None) - - def test_inactive_weakref(self): - o = weakref.ref(greenlet.greenlet()) - gc.collect() - self.assertEqual(o(), None) - - def test_dealloc_weakref(self): - seen = [] - def worker(): - try: - greenlet.getcurrent().parent.switch() - finally: - seen.append(g()) - g = greenlet.greenlet(worker) - g.switch() - g2 = greenlet.greenlet(lambda: None, g) - g = weakref.ref(g2) - g2 = None - self.assertEqual(seen, [None]) diff --git a/myenv/Lib/site-packages/groq-0.11.0.dist-info/INSTALLER b/myenv/Lib/site-packages/groq-0.11.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/groq-0.11.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/groq-0.11.0.dist-info/METADATA b/myenv/Lib/site-packages/groq-0.11.0.dist-info/METADATA deleted file mode 100644 index b7ad9e7..0000000 --- a/myenv/Lib/site-packages/groq-0.11.0.dist-info/METADATA +++ /dev/null @@ -1,413 +0,0 @@ -Metadata-Version: 2.3 -Name: groq -Version: 0.11.0 -Summary: The official Python library for the groq API -Project-URL: Homepage, https://github.com/groq/groq-python -Project-URL: Repository, https://github.com/groq/groq-python -Author-email: Groq -License-Expression: Apache-2.0 -License-File: LICENSE -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Apache Software License -Classifier: Operating System :: MacOS -Classifier: Operating System :: Microsoft :: Windows -Classifier: Operating System :: OS Independent -Classifier: Operating System :: POSIX -Classifier: Operating System :: POSIX :: Linux -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Typing :: Typed -Requires-Python: >=3.7 -Requires-Dist: anyio<5,>=3.5.0 -Requires-Dist: cached-property; python_version < '3.8' -Requires-Dist: distro<2,>=1.7.0 -Requires-Dist: httpx<1,>=0.23.0 -Requires-Dist: pydantic<3,>=1.9.0 -Requires-Dist: sniffio -Requires-Dist: typing-extensions<5,>=4.7 -Description-Content-Type: text/markdown - -# Groq Python API library - -[![PyPI version](https://img.shields.io/pypi/v/groq.svg)](https://pypi.org/project/groq/) - -The Groq Python library provides convenient access to the Groq REST API from any Python 3.7+ -application. The library includes type definitions for all request params and response fields, -and offers both synchronous and asynchronous clients powered by [httpx](https://github.com/encode/httpx). - -It is generated with [Stainless](https://www.stainlessapi.com/). - -## Documentation - -The REST API documentation can be found on [console.groq.com](https://console.groq.com/docs). The full API of this library can be found in [api.md](https://github.com/groq/groq-python/tree/main/api.md). - -## Installation - -```sh -# install from PyPI -pip install groq -``` - -## Usage - -The full API of this library can be found in [api.md](https://github.com/groq/groq-python/tree/main/api.md). 
- -```python -import os -from groq import Groq - -client = Groq( - # This is the default and can be omitted - api_key=os.environ.get("GROQ_API_KEY"), -) - -chat_completion = client.chat.completions.create( - messages=[ - { - "role": "user", - "content": "Explain the importance of low latency LLMs", - } - ], - model="llama3-8b-8192", -) -print(chat_completion.choices[0].message.content) -``` - -While you can provide an `api_key` keyword argument, -we recommend using [python-dotenv](https://pypi.org/project/python-dotenv/) -to add `GROQ_API_KEY="My API Key"` to your `.env` file -so that your API Key is not stored in source control. - -## Async usage - -Simply import `AsyncGroq` instead of `Groq` and use `await` with each API call: - -```python -import os -import asyncio -from groq import AsyncGroq - -client = AsyncGroq( - # This is the default and can be omitted - api_key=os.environ.get("GROQ_API_KEY"), -) - - -async def main() -> None: - chat_completion = await client.chat.completions.create( - messages=[ - { - "role": "user", - "content": "Explain the importance of low latency LLMs", - } - ], - model="llama3-8b-8192", - ) - print(chat_completion.choices[0].message.content) - - -asyncio.run(main()) -``` - -Functionality between the synchronous and asynchronous clients is otherwise identical. - -## Using types - -Nested request parameters are [TypedDicts](https://docs.python.org/3/library/typing.html#typing.TypedDict). Responses are [Pydantic models](https://docs.pydantic.dev) which also provide helper methods for things like: - -- Serializing back into JSON, `model.to_json()` -- Converting to a dictionary, `model.to_dict()` - -Typed requests and responses provide autocomplete and documentation within your editor. If you would like to see type errors in VS Code to help catch bugs earlier, set `python.analysis.typeCheckingMode` to `basic`. - -## Handling errors - -When the library is unable to connect to the API (for example, due to network connection problems or a timeout), a subclass of `groq.APIConnectionError` is raised. - -When the API returns a non-success status code (that is, 4xx or 5xx -response), a subclass of `groq.APIStatusError` is raised, containing `status_code` and `response` properties. - -All errors inherit from `groq.APIError`. - -```python -import groq -from groq import Groq - -client = Groq() - -try: - client.chat.completions.create( - messages=[ - { - "role": "system", - "content": "You are a helpful assistant.", - }, - { - "role": "user", - "content": "Explain the importance of low latency LLMs", - }, - ], - model="llama3-8b-8192", - ) -except groq.APIConnectionError as e: - print("The server could not be reached") - print(e.__cause__) # an underlying Exception, likely raised within httpx. -except groq.RateLimitError as e: - print("A 429 status code was received; we should back off a bit.") -except groq.APIStatusError as e: - print("Another non-200-range status code was received") - print(e.status_code) - print(e.response) -``` - -Error codes are as follows: - -| Status Code | Error Type | -| ----------- | -------------------------- | -| 400 | `BadRequestError` | -| 401 | `AuthenticationError` | -| 403 | `PermissionDeniedError` | -| 404 | `NotFoundError` | -| 422 | `UnprocessableEntityError` | -| 429 | `RateLimitError` | -| >=500 | `InternalServerError` | -| N/A | `APIConnectionError` | - -### Retries - -Certain errors are automatically retried 2 times by default, with a short exponential backoff. -Connection errors (for example, due to a network connectivity problem), 408 Request Timeout, 409 Conflict, -429 Rate Limit, and >=500 Internal errors are all retried by default. -
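The schedule behind that backoff is implemented by `_calculate_retry_timeout` in `groq/_base_client.py`, which is deleted further down in this changeset. As a minimal, self-contained sketch of that schedule (the two delay constants are assumed for illustration; the real values live in `groq/_constants.py`, which this diff does not show):

```python
import random
from typing import Optional

# Assumed values, for illustration only; the actual constants are defined in
# groq/_constants.py, which is not shown in this diff.
INITIAL_RETRY_DELAY = 0.5
MAX_RETRY_DELAY = 8.0


def retry_delay(nb_retries: int, retry_after: Optional[float] = None) -> float:
    """Seconds to sleep before the next attempt.

    `nb_retries` counts the retries already made; `retry_after` is the value
    parsed from a `Retry-After`/`retry-after-ms` response header, if any.
    """
    # Honor a reasonable server-provided Retry-After hint as-is.
    if retry_after is not None and 0 < retry_after <= 60:
        return retry_after
    # Otherwise apply capped exponential backoff...
    sleep_seconds = min(INITIAL_RETRY_DELAY * 2.0 ** nb_retries, MAX_RETRY_DELAY)
    # ...with jitter that shaves off up to 25% of the delay.
    return max(0.0, sleep_seconds * (1 - 0.25 * random.random()))
```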
-You can use the `max_retries` option to configure or disable retry settings: - -```python -from groq import Groq - -# Configure the default for all requests: -client = Groq( - # default is 2 - max_retries=0, -) - -# Or, configure per-request: -client.with_options(max_retries=5).chat.completions.create( - messages=[ - { - "role": "system", - "content": "You are a helpful assistant.", - }, - { - "role": "user", - "content": "Explain the importance of low latency LLMs", - }, - ], - model="llama3-8b-8192", -) -``` - -### Timeouts - -By default requests time out after 1 minute. You can configure this with a `timeout` option, -which accepts a float or an [`httpx.Timeout`](https://www.python-httpx.org/advanced/#fine-tuning-the-configuration) object: - -```python -import httpx - -from groq import Groq - -# Configure the default for all requests: -client = Groq( - # 20 seconds (default is 1 minute) - timeout=20.0, -) - -# More granular control: -client = Groq( - timeout=httpx.Timeout(60.0, read=5.0, write=10.0, connect=2.0), -) - -# Override per-request: -client.with_options(timeout=5.0).chat.completions.create( - messages=[ - { - "role": "system", - "content": "You are a helpful assistant.", - }, - { - "role": "user", - "content": "Explain the importance of low latency LLMs", - }, - ], - model="llama3-8b-8192", -) -``` - -On timeout, an `APITimeoutError` is thrown. - -Note that requests that time out are [retried twice by default](https://github.com/groq/groq-python/tree/main/#retries). - -## Advanced - -### Logging - -We use the standard library [`logging`](https://docs.python.org/3/library/logging.html) module. - -You can enable logging by setting the environment variable `GROQ_LOG` to `debug`. - -```shell -$ export GROQ_LOG=debug -``` - -### How to tell whether `None` means `null` or missing - -In an API response, a field may be explicitly `null`, or missing entirely; in either case, its value is `None` in this library. You can differentiate the two cases with `.model_fields_set`: - -```py -if response.my_field is None: - if 'my_field' not in response.model_fields_set: - print('Got json like {}, without a "my_field" key present at all.') - else: - print('Got json like {"my_field": null}.') -``` - -### Accessing raw response data (e.g. headers) - -The "raw" Response object can be accessed by prefixing `.with_raw_response.` to any HTTP method call, e.g., - -```py -from groq import Groq - -client = Groq() -response = client.chat.completions.with_raw_response.create( - messages=[{ - "role": "system", - "content": "You are a helpful assistant.", - }, { - "role": "user", - "content": "Explain the importance of low latency LLMs", - }], - model="llama3-8b-8192", -) -print(response.headers.get('X-My-Header')) - -completion = response.parse() # get the object that `chat.completions.create()` would have returned -print(completion.id) -``` - -These methods return an [`APIResponse`](https://github.com/groq/groq-python/tree/main/src/groq/_response.py) object. - -The async client returns an [`AsyncAPIResponse`](https://github.com/groq/groq-python/tree/main/src/groq/_response.py) with the same structure, the only difference being `await`able methods for reading the response content. - -#### `.with_streaming_response` - -The above interface eagerly reads the full response body when you make the request, which may not always be what you want.
- -To stream the response body, use `.with_streaming_response` instead, which requires a context manager and only reads the response body once you call `.read()`, `.text()`, `.json()`, `.iter_bytes()`, `.iter_text()`, `.iter_lines()` or `.parse()`. In the async client, these are async methods. - -```python -with client.chat.completions.with_streaming_response.create( - messages=[ - { - "role": "system", - "content": "You are a helpful assistant.", - }, - { - "role": "user", - "content": "Explain the importance of low latency LLMs", - }, - ], - model="llama3-8b-8192", -) as response: - print(response.headers.get("X-My-Header")) - - for line in response.iter_lines(): - print(line) -``` - -The context manager is required so that the response will reliably be closed. - -### Making custom/undocumented requests - -This library is typed for convenient access to the documented API. - -If you need to access undocumented endpoints, params, or response properties, the library can still be used. - -#### Undocumented endpoints - -To make requests to undocumented endpoints, you can make requests using `client.get`, `client.post`, and other -http verbs. Options on the client (such as retries) will be respected when making this -request. - -```py -import httpx - -response = client.post( - "/foo", - cast_to=httpx.Response, - body={"my_param": True}, -) - -print(response.headers.get("x-foo")) -``` - -#### Undocumented request params - -If you want to explicitly send an extra param, you can do so with the `extra_query`, `extra_body`, and `extra_headers` request -options. - -#### Undocumented response properties - -To access undocumented response properties, you can access the extra fields like `response.unknown_prop`. You -can also get all the extra fields on the Pydantic model as a dict with -[`response.model_extra`](https://docs.pydantic.dev/latest/api/base_model/#pydantic.BaseModel.model_extra). - -### Configuring the HTTP client - -You can directly override the [httpx client](https://www.python-httpx.org/api/#client) to customize it for your use case, including: - -- Support for proxies -- Custom transports -- Additional [advanced](https://www.python-httpx.org/advanced/clients/) functionality - -```python -import httpx - -from groq import Groq, DefaultHttpxClient - -client = Groq( - # Or use the `GROQ_BASE_URL` env var - base_url="http://my.test.server.example.com:8083", - http_client=DefaultHttpxClient( - proxies="http://my.test.proxy.example.com", - transport=httpx.HTTPTransport(local_address="0.0.0.0"), - ), -) -``` - -You can also customize the client on a per-request basis by using `with_options()`: - -```python -client.with_options(http_client=DefaultHttpxClient(...)) -``` - -### Managing HTTP resources - -By default the library closes underlying HTTP connections whenever the client is [garbage collected](https://docs.python.org/3/reference/datamodel.html#object.__del__). You can manually close the client using the `.close()` method if desired, or with a context manager that closes when exiting. - -## Versioning - -This package generally follows [SemVer](https://semver.org/spec/v2.0.0.html) conventions, though certain backwards-incompatible changes may be released as minor versions: - -1. Changes that only affect static types, without breaking runtime behavior. -2. Changes to library internals which are technically public but not intended or documented for external use. _(Please open a GitHub issue to let us know if you are relying on such internals)_. -3.
Changes that we do not expect to impact the vast majority of users in practice. - -We take backwards-compatibility seriously and work hard to ensure you can rely on a smooth upgrade experience. - -We are keen for your feedback; please open an [issue](https://www.github.com/groq/groq-python/issues) with questions, bugs, or suggestions. - -## Requirements - -Python 3.7 or higher. diff --git a/myenv/Lib/site-packages/groq-0.11.0.dist-info/RECORD b/myenv/Lib/site-packages/groq-0.11.0.dist-info/RECORD deleted file mode 100644 index 0ad9bfe..0000000 --- a/myenv/Lib/site-packages/groq-0.11.0.dist-info/RECORD +++ /dev/null @@ -1,158 +0,0 @@ -groq-0.11.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -groq-0.11.0.dist-info/METADATA,sha256=-oCoO551g_9IvroyIZSPcaNuw2-I-0GYlNGa_KnP_2g,13586 -groq-0.11.0.dist-info/RECORD,, -groq-0.11.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -groq-0.11.0.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 -groq-0.11.0.dist-info/licenses/LICENSE,sha256=Ly0cOEO0pgTcVTdUIkuahUEXsWgkofjoH0oKiSXd4RI,11334 -groq/__init__.py,sha256=-fn5GXv-sM0rPVI9FM6Tisk2y1MQ40ttoVG4ILB3dbo,2381 -groq/__pycache__/__init__.cpython-312.pyc,, -groq/__pycache__/_base_client.cpython-312.pyc,, -groq/__pycache__/_client.cpython-312.pyc,, -groq/__pycache__/_compat.cpython-312.pyc,, -groq/__pycache__/_constants.cpython-312.pyc,, -groq/__pycache__/_exceptions.cpython-312.pyc,, -groq/__pycache__/_files.cpython-312.pyc,, -groq/__pycache__/_models.cpython-312.pyc,, -groq/__pycache__/_qs.cpython-312.pyc,, -groq/__pycache__/_resource.cpython-312.pyc,, -groq/__pycache__/_response.cpython-312.pyc,, -groq/__pycache__/_streaming.cpython-312.pyc,, -groq/__pycache__/_types.cpython-312.pyc,, -groq/__pycache__/_version.cpython-312.pyc,, -groq/_base_client.py,sha256=4fsAPa6hLKuKXpABUl9d9-osW3NRynqkptgcvPzN_AI,66414 -groq/_client.py,sha256=zHFDPX3lfgZOAsbKQ322Acz--ZuGE2wZC50tYT6yAUU,16214 -groq/_compat.py,sha256=FgGcnNlyW7uHKyGh_Wvo7qZi-zVPmHx7mhb3F1GEZSw,6430 -groq/_constants.py,sha256=JE8kyZa2Q4NK_i4fO--8siEYTzeHnT0fYbOFDgDP4uk,464 -groq/_exceptions.py,sha256=vwW8IIMnMqCbK8vALhJ-xypkaRN5t44TWdlOPPvs72o,3216 -groq/_files.py,sha256=mf4dOgL4b0ryyZlbqLhggD3GVgDf6XxdGFAgce01ugE,3549 -groq/_models.py,sha256=pXirq94yiihFXBbiR50vA-0NIlDurxbJ_rLXK062vWQ,28267 -groq/_qs.py,sha256=AOkSz4rHtK4YI3ZU_kzea-zpwBUgEY8WniGmTPyEimc,4846 -groq/_resource.py,sha256=wjIIjnrEuLKT2QMKq7EyqL3MAj8sBzVpXZ3aiNgO-B8,1088 -groq/_response.py,sha256=y-H7bbYRbVdYYM_BrGq84tfK0ZPU7nqtChuYKiTZFqM,28508 -groq/_streaming.py,sha256=xRlSI9YDpN6dj4rOGP6-q4OVQHID7zdd9jmb_JXxH-c,13094 -groq/_types.py,sha256=fo_SwJjxbOXEZi6JW4a_lmGjSTW8mAglArzQ8m125eY,6101 -groq/_utils/__init__.py,sha256=Uzq1-FIih_VUjzdNVWXks0sdC39KBKLMrZoz-_JOjJ4,1988 -groq/_utils/__pycache__/__init__.cpython-312.pyc,, -groq/_utils/__pycache__/_logs.cpython-312.pyc,, -groq/_utils/__pycache__/_proxy.cpython-312.pyc,, -groq/_utils/__pycache__/_reflection.cpython-312.pyc,, -groq/_utils/__pycache__/_streams.cpython-312.pyc,, -groq/_utils/__pycache__/_sync.cpython-312.pyc,, -groq/_utils/__pycache__/_transform.cpython-312.pyc,, -groq/_utils/__pycache__/_typing.cpython-312.pyc,, -groq/_utils/__pycache__/_utils.cpython-312.pyc,, -groq/_utils/_logs.py,sha256=5CMKGQJLSze48WFXOeqP08hwJxTyTtRuBuXv0bcWy_U,768 -groq/_utils/_proxy.py,sha256=z3zsateHtb0EARTWKk8QZNHfPkqJbqwd1lM993LBwGE,1902 -groq/_utils/_reflection.py,sha256=ZmGkIgT_PuwedyNBrrKGbxoWtkpytJNU1uU4QHnmEMU,1364 
-groq/_utils/_streams.py,sha256=SMC90diFFecpEg_zgDRVbdR3hSEIgVVij4taD-noMLM,289 -groq/_utils/_sync.py,sha256=9ex9pfOyd8xAF1LxpFx4IkqL8k0vk8srE2Ee-OTMQ0A,2840 -groq/_utils/_transform.py,sha256=NCz3q9_O-vuj60xVe-qzhEQ8uJWlZWJTsM-GwHDccf8,12958 -groq/_utils/_typing.py,sha256=tFbktdpdHCQliwzGsWysgn0P5H0JRdagkZdb_LegGkY,3838 -groq/_utils/_utils.py,sha256=LMVTMZG8pfu8AkJNSfmv_z3guQlOfm2UxDTjTTXggfg,11411 -groq/_version.py,sha256=FKMtbjojPZXVp3tr2H_DYdOOfldED9ZopRxIIKZnmOM,157 -groq/lib/.keep,sha256=wuNrz-5SXo3jJaJOJgz4vFHM41YH_g20F5cRQo0vLes,224 -groq/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -groq/resources/__init__.py,sha256=z79QNnOPR8AfEpWSL_u0S5OYWRRESsLCYjO4LIole5M,1531 -groq/resources/__pycache__/__init__.cpython-312.pyc,, -groq/resources/__pycache__/embeddings.cpython-312.pyc,, -groq/resources/__pycache__/models.cpython-312.pyc,, -groq/resources/audio/__init__.py,sha256=-t8JMdMaPeCG7px5K80Ar1tdkMVEr3k390RuG-n25Cc,1333 -groq/resources/audio/__pycache__/__init__.cpython-312.pyc,, -groq/resources/audio/__pycache__/audio.cpython-312.pyc,, -groq/resources/audio/__pycache__/transcriptions.cpython-312.pyc,, -groq/resources/audio/__pycache__/translations.cpython-312.pyc,, -groq/resources/audio/audio.py,sha256=Qjz9Ert7bCvYYn6dOzyAXICilk7jFDHyUmVYFM8idFQ,3560 -groq/resources/audio/transcriptions.py,sha256=AtSvjIQpOuEu4P5JFVSLQM9Fyg970HhZm60vPDlFt88,15341 -groq/resources/audio/translations.py,sha256=qZYLv4BlJoIj1LF03U9NGnOrFmLnvt_HPtHDoTcKgGU,8875 -groq/resources/chat/__init__.py,sha256=8Q9ODRo1wIpFa34VaNwuaWFmxqFxagDtUhIAkQNvxEU,849 -groq/resources/chat/__pycache__/__init__.cpython-312.pyc,, -groq/resources/chat/__pycache__/chat.cpython-312.pyc,, -groq/resources/chat/__pycache__/completions.cpython-312.pyc,, -groq/resources/chat/chat.py,sha256=Edexhbq1anfSS_I0wNRQb7rx1OV6-rq4sxgVlYDGb6Y,2342 -groq/resources/chat/completions.py,sha256=MH4cjLLWTRliTGwE-LbkM3uK2OgSz8SRVmL2rcufM-I,33511 -groq/resources/embeddings.py,sha256=_fuAwH5EnghsTlKpAcVFy96puFA_VMqUXb88bGVX0-Y,7100 -groq/resources/models.py,sha256=1-eTSLTomd1Ae2oaPx4CS6nk7wr2gjk9Yfuec3gLkXg,9530 -groq/types/__init__.py,sha256=1Xg4sGZRQC-WwZ0O0eLH2pTYpEaR0X2uBoN5KNd8XXY,718 -groq/types/__pycache__/__init__.cpython-312.pyc,, -groq/types/__pycache__/completion_usage.cpython-312.pyc,, -groq/types/__pycache__/create_embedding_response.cpython-312.pyc,, -groq/types/__pycache__/embedding.cpython-312.pyc,, -groq/types/__pycache__/embedding_create_params.cpython-312.pyc,, -groq/types/__pycache__/model.cpython-312.pyc,, -groq/types/__pycache__/model_deleted.cpython-312.pyc,, -groq/types/__pycache__/model_list_response.cpython-312.pyc,, -groq/types/audio/__init__.py,sha256=D6Tk4GQbFtUML4-MfgTaQfOnZdhDkx6shTnDpmeKvt8,419 -groq/types/audio/__pycache__/__init__.cpython-312.pyc,, -groq/types/audio/__pycache__/transcription.cpython-312.pyc,, -groq/types/audio/__pycache__/transcription_create_params.cpython-312.pyc,, -groq/types/audio/__pycache__/translation.cpython-312.pyc,, -groq/types/audio/__pycache__/translation_create_params.cpython-312.pyc,, -groq/types/audio/transcription.py,sha256=jP13KGV0ZSgK3FkIZueDLrH4Yhafp5FkXBEP85deBAo,231 -groq/types/audio/transcription_create_params.py,sha256=rVmXB6EL64EZKIpDRNUaCCd6V6F6Ho7Bvlgrij2s6cI,3950 -groq/types/audio/translation.py,sha256=_PhTtQ-s1yc-4kAKlgc88FTqUpXnNYfM2ld5IuRRGkA,195 -groq/types/audio/translation_create_params.py,sha256=2OjCat7Uahbr1pM9ScLS9BPUpltmqy9zazsiiGa8Qto,1442 -groq/types/chat/__init__.py,sha256=waUSt926WgCuTOyde_4XXkn36Hd6GhiPZr2KWYMZVy0,2464 
-groq/types/chat/__pycache__/__init__.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_assistant_message_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_chunk.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_content_part_image_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_content_part_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_content_part_text_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_function_call_option_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_function_message_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_message.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_message_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_message_tool_call.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_message_tool_call_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_named_tool_choice_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_role.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_system_message_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_token_logprob.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_tool_choice_option_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_tool_message_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_tool_param.cpython-312.pyc,, -groq/types/chat/__pycache__/chat_completion_user_message_param.cpython-312.pyc,, -groq/types/chat/__pycache__/completion_create_params.cpython-312.pyc,, -groq/types/chat/chat_completion.py,sha256=jezM24i54aMbWuffDapMxvmnaBx_pdTeQmG9IQAFBIY,2219 -groq/types/chat/chat_completion_assistant_message_param.py,sha256=PjStSmY6qhbuaiLZLLkAzitRO9aJQc9OBdwz0xjRxbs,1618 -groq/types/chat/chat_completion_chunk.py,sha256=W5VFBbg1iN0ewC4kUWZ0x9LlLtXro7yS5hUmGjJwc-o,5089 -groq/types/chat/chat_completion_content_part_image_param.py,sha256=_JTAMYdbTc60hSedXsMQhFyLi1jGynraf-Ig5FU2k1c,660 -groq/types/chat/chat_completion_content_part_param.py,sha256=8hoTnNqerHjaHGMFU8CvhjVbH8yChXEYxs3jLWKfod8,543 -groq/types/chat/chat_completion_content_part_text_param.py,sha256=4IpiXMKM9AuTyop5PRptPBbBhh9s93xy2vjg4Yw6NIw,429 -groq/types/chat/chat_completion_function_call_option_param.py,sha256=M-IqWHyBLkvYBcwFxxp4ydCIxbPDaMlNl4bik9UoFd4,365 -groq/types/chat/chat_completion_function_message_param.py,sha256=jIaZbBHHbt4v4xHCIyvYtYLst_X4jOznRjYNcTf0MF0,591 -groq/types/chat/chat_completion_message.py,sha256=19e2EL6cHZA6EeOVPgI_LbN3UwNLKizhtxuXnxLzhX0,1282 -groq/types/chat/chat_completion_message_param.py,sha256=RFer4ZYXxVed9F0ulkqi0xNy_eOhp63Y-0oN24dhVBI,889 -groq/types/chat/chat_completion_message_tool_call.py,sha256=XlIe2vhSYvrt8o8Yol5AQqnacI1xHqpEIV26G4oNrZY,900 -groq/types/chat/chat_completion_message_tool_call_param.py,sha256=XNhuUpGr5qwVTo0K8YavJwleHYSdwN_urK51eKlqC24,1009 -groq/types/chat/chat_completion_named_tool_choice_param.py,sha256=JsxfSJYpOmF7zIreQ0JrXRSLp07OGCBSycRRcF6OZmg,569 -groq/types/chat/chat_completion_role.py,sha256=Rdzg4deI1uZmqgkwnMrLHvbV2fPRqKcHLQrVmKVk9Dw,262 -groq/types/chat/chat_completion_system_message_param.py,sha256=qWEJupmzMuUa82V7OoLeQF92SKE1QoU4cXfX2o43x9E,638 -groq/types/chat/chat_completion_token_logprob.py,sha256=6-ipUFfsXMf5L7FDFi127NaVkDtmEooVgGBF6Ts965A,1769 
-groq/types/chat/chat_completion_tool_choice_option_param.py,sha256=ef71WSM9HMQhIQUocRgVJUVW-bSRwK2_1NjFSB5TPiI,472 -groq/types/chat/chat_completion_tool_message_param.py,sha256=B-PST-J1VwPjaKLpzpmqfEsHlr5Owb54dnQoIhbvuY4,553 -groq/types/chat/chat_completion_tool_param.py,sha256=J9r2TAWygkIBDInWEKx29gBE0wiCgc7HpXFyQhxSkAU,503 -groq/types/chat/chat_completion_user_message_param.py,sha256=mik-MRkwb543C5FSJ52LtTkeA2E_HdLUgtoHEdO73XQ,792 -groq/types/chat/completion_create_params.py,sha256=g7AETAh9w3bbeRldBFUtdOUGG4f7XgAQuB6nLj88y1U,7764 -groq/types/completion_usage.py,sha256=MkWLiOmSPCveQU9kp_moCXPS1GASBQhq7ecAMNOeyiQ,809 -groq/types/create_embedding_response.py,sha256=lTAu_Pym76kFljDnnDRoDB2GNQSzWmwwlqf5ff7FNPM,798 -groq/types/embedding.py,sha256=rlM_cs8um94yOf9kHWlURzVyGsIjzvzD0xzEjMaUVpU,629 -groq/types/embedding_create_params.py,sha256=zbdG6oBqDKXzPw3QqL6NR_ZJhkBUFaEA53keEECHxn8,1054 -groq/types/model.py,sha256=DMw8KwQx8B6S6sAI038D0xdzkmYdY5-r0oMhCUG4l6w,532 -groq/types/model_deleted.py,sha256=rDGU-Ul4lMfNf5XxKNxZKo9CQPGsrkrzqnhl00GLMi4,230 -groq/types/model_list_response.py,sha256=fUKK6I7am-Sx4cnC7nC3mHEhOKZXbkTRHLzxiApK3pA,329 -groq/types/shared/__init__.py,sha256=eoiCHGKeY1_YjOn41M8QxvIUI_M68Ltsr1d67g_Pr-I,288 -groq/types/shared/__pycache__/__init__.cpython-312.pyc,, -groq/types/shared/__pycache__/error_object.cpython-312.pyc,, -groq/types/shared/__pycache__/function_definition.cpython-312.pyc,, -groq/types/shared/__pycache__/function_parameters.cpython-312.pyc,, -groq/types/shared/error_object.py,sha256=G7SGPZ9Qw3gewTKbi3fK69eM6L2Ur0C2D57N8iEapJA,305 -groq/types/shared/function_definition.py,sha256=oMxY-W_pwcX6Inohy7Pax6GKSNTnhqbNSrW68nvT4fk,1012 -groq/types/shared/function_parameters.py,sha256=Dkc_pm98zCKyouQmYrl934cK8ZWX7heY_IIyunW8x7c,236 -groq/types/shared_params/__init__.py,sha256=Jaw3mmmUB3Ky7vL1fzsh-8kAJEbeYxcQ0JOy7p765Xo,235 -groq/types/shared_params/__pycache__/__init__.cpython-312.pyc,, -groq/types/shared_params/__pycache__/function_definition.cpython-312.pyc,, -groq/types/shared_params/__pycache__/function_parameters.cpython-312.pyc,, -groq/types/shared_params/function_definition.py,sha256=2NFZiFLSY3XzCOMGimq6sPKLpOuSIVSuvMJTwDNiJAI,1026 -groq/types/shared_params/function_parameters.py,sha256=UvxKz_3b9b5ECwWr8RFrIH511htbU2JZsp9Z9BMkF-o,272 diff --git a/myenv/Lib/site-packages/groq-0.11.0.dist-info/REQUESTED b/myenv/Lib/site-packages/groq-0.11.0.dist-info/REQUESTED deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/groq-0.11.0.dist-info/WHEEL b/myenv/Lib/site-packages/groq-0.11.0.dist-info/WHEEL deleted file mode 100644 index cdd68a4..0000000 --- a/myenv/Lib/site-packages/groq-0.11.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.25.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/groq-0.11.0.dist-info/licenses/LICENSE b/myenv/Lib/site-packages/groq-0.11.0.dist-info/licenses/LICENSE deleted file mode 100644 index 76a2456..0000000 --- a/myenv/Lib/site-packages/groq-0.11.0.dist-info/licenses/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright 2024 Groq - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/myenv/Lib/site-packages/groq/__init__.py b/myenv/Lib/site-packages/groq/__init__.py deleted file mode 100644 index 25f858e..0000000 --- a/myenv/Lib/site-packages/groq/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from . import types -from ._types import NOT_GIVEN, NoneType, NotGiven, Transport, ProxiesTypes -from ._utils import file_from_path -from ._client import Groq, Client, Stream, Timeout, AsyncGroq, Transport, AsyncClient, AsyncStream, RequestOptions -from ._models import BaseModel -from ._version import __title__, __version__ -from ._response import APIResponse as APIResponse, AsyncAPIResponse as AsyncAPIResponse -from ._constants import DEFAULT_TIMEOUT, DEFAULT_MAX_RETRIES, DEFAULT_CONNECTION_LIMITS -from ._exceptions import ( - APIError, - GroqError, - ConflictError, - NotFoundError, - APIStatusError, - RateLimitError, - APITimeoutError, - BadRequestError, - APIConnectionError, - AuthenticationError, - InternalServerError, - PermissionDeniedError, - UnprocessableEntityError, - APIResponseValidationError, -) -from ._base_client import DefaultHttpxClient, DefaultAsyncHttpxClient -from ._utils._logs import setup_logging as _setup_logging - -__all__ = [ - "types", - "__version__", - "__title__", - "NoneType", - "Transport", - "ProxiesTypes", - "NotGiven", - "NOT_GIVEN", - "GroqError", - "APIError", - "APIStatusError", - "APITimeoutError", - "APIConnectionError", - "APIResponseValidationError", - "BadRequestError", - "AuthenticationError", - "PermissionDeniedError", - "NotFoundError", - "ConflictError", - "UnprocessableEntityError", - "RateLimitError", - "InternalServerError", - "Timeout", - "RequestOptions", - "Client", - "AsyncClient", - "Stream", - "AsyncStream", - "Groq", - "AsyncGroq", - "file_from_path", - "BaseModel", - "DEFAULT_TIMEOUT", - "DEFAULT_MAX_RETRIES", - "DEFAULT_CONNECTION_LIMITS", - "DefaultHttpxClient", - "DefaultAsyncHttpxClient", -] - -_setup_logging() - -# Update the __module__ attribute for exported symbols so that -# error messages point to this module instead of the module -# it was originally defined in, e.g. -# groq._exceptions.NotFoundError -> groq.NotFoundError -__locals = locals() -for __name in __all__: - if not __name.startswith("__"): - try: - __locals[__name].__module__ = "groq" - except (TypeError, AttributeError): - # Some of our exported symbols are builtins which we can't set attributes for. - pass diff --git a/myenv/Lib/site-packages/groq/_base_client.py b/myenv/Lib/site-packages/groq/_base_client.py deleted file mode 100644 index 2311063..0000000 --- a/myenv/Lib/site-packages/groq/_base_client.py +++ /dev/null @@ -1,2019 +0,0 @@ -from __future__ import annotations - -import sys -import json -import time -import uuid -import email -import asyncio -import inspect -import logging -import platform -import warnings -import email.utils -from types import TracebackType -from random import random -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Type, - Union, - Generic, - Mapping, - TypeVar, - Iterable, - Iterator, - Optional, - Generator, - AsyncIterator, - cast, - overload, -) -from typing_extensions import Literal, override, get_origin - -import anyio -import httpx -import distro -import pydantic -from httpx import URL, Limits -from pydantic import PrivateAttr - -from . 
import _exceptions -from ._qs import Querystring -from ._files import to_httpx_files, async_to_httpx_files -from ._types import ( - NOT_GIVEN, - Body, - Omit, - Query, - Headers, - Timeout, - NotGiven, - ResponseT, - Transport, - AnyMapping, - PostParser, - ProxiesTypes, - RequestFiles, - HttpxSendArgs, - AsyncTransport, - RequestOptions, - HttpxRequestFiles, - ModelBuilderProtocol, -) -from ._utils import is_dict, is_list, asyncify, is_given, lru_cache, is_mapping -from ._compat import model_copy, model_dump -from ._models import GenericModel, FinalRequestOptions, validate_type, construct_type -from ._response import ( - APIResponse, - BaseAPIResponse, - AsyncAPIResponse, - extract_response_type, -) -from ._constants import ( - DEFAULT_TIMEOUT, - MAX_RETRY_DELAY, - DEFAULT_MAX_RETRIES, - INITIAL_RETRY_DELAY, - RAW_RESPONSE_HEADER, - OVERRIDE_CAST_TO_HEADER, - DEFAULT_CONNECTION_LIMITS, -) -from ._streaming import Stream, SSEDecoder, AsyncStream, SSEBytesDecoder -from ._exceptions import ( - APIStatusError, - APITimeoutError, - APIConnectionError, - APIResponseValidationError, -) - -log: logging.Logger = logging.getLogger(__name__) - -# TODO: make base page type vars covariant -SyncPageT = TypeVar("SyncPageT", bound="BaseSyncPage[Any]") -AsyncPageT = TypeVar("AsyncPageT", bound="BaseAsyncPage[Any]") - - -_T = TypeVar("_T") -_T_co = TypeVar("_T_co", covariant=True) - -_StreamT = TypeVar("_StreamT", bound=Stream[Any]) -_AsyncStreamT = TypeVar("_AsyncStreamT", bound=AsyncStream[Any]) - -if TYPE_CHECKING: - from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT -else: - try: - from httpx._config import DEFAULT_TIMEOUT_CONFIG as HTTPX_DEFAULT_TIMEOUT - except ImportError: - # taken from https://github.com/encode/httpx/blob/3ba5fe0d7ac70222590e759c31442b1cab263791/httpx/_config.py#L366 - HTTPX_DEFAULT_TIMEOUT = Timeout(5.0) - - -class PageInfo: - """Stores the necessary information to build the request to retrieve the next page. - - Either `url` or `params` must be set. - """ - - url: URL | NotGiven - params: Query | NotGiven - - @overload - def __init__( - self, - *, - url: URL, - ) -> None: ... - - @overload - def __init__( - self, - *, - params: Query, - ) -> None: ... - - def __init__( - self, - *, - url: URL | NotGiven = NOT_GIVEN, - params: Query | NotGiven = NOT_GIVEN, - ) -> None: - self.url = url - self.params = params - - -class BasePage(GenericModel, Generic[_T]): - """ - Defines the core interface for pagination. - - Type Args: - ModelT: The pydantic model that represents an item in the response. - - Methods: - has_next_page(): Check if there is another page available - next_page_info(): Get the necessary information to make a request for the next page - """ - - _options: FinalRequestOptions = PrivateAttr() - _model: Type[_T] = PrivateAttr() - - def has_next_page(self) -> bool: - items = self._get_page_items() - if not items: - return False - return self.next_page_info() is not None - - def next_page_info(self) -> Optional[PageInfo]: ... - - def _get_page_items(self) -> Iterable[_T]: # type: ignore[empty-body] - ... - - def _params_from_url(self, url: URL) -> httpx.QueryParams: - # TODO: do we have to preprocess params here? 
- return httpx.QueryParams(cast(Any, self._options.params)).merge(url.params) - - def _info_to_options(self, info: PageInfo) -> FinalRequestOptions: - options = model_copy(self._options) - options._strip_raw_response_header() - - if not isinstance(info.params, NotGiven): - options.params = {**options.params, **info.params} - return options - - if not isinstance(info.url, NotGiven): - params = self._params_from_url(info.url) - url = info.url.copy_with(params=params) - options.params = dict(url.params) - options.url = str(url) - return options - - raise ValueError("Unexpected PageInfo state") - - -class BaseSyncPage(BasePage[_T], Generic[_T]): - _client: SyncAPIClient = pydantic.PrivateAttr() - - def _set_private_attributes( - self, - client: SyncAPIClient, - model: Type[_T], - options: FinalRequestOptions, - ) -> None: - self._model = model - self._client = client - self._options = options - - # Pydantic uses a custom `__iter__` method to support casting BaseModels - # to dictionaries. e.g. dict(model). - # As we want to support `for item in page`, this is inherently incompatible - # with the default pydantic behaviour. It is not possible to support both - # use cases at once. Fortunately, this is not a big deal as all other pydantic - # methods should continue to work as expected as there is an alternative method - # to cast a model to a dictionary, model.dict(), which is used internally - # by pydantic. - def __iter__(self) -> Iterator[_T]: # type: ignore - for page in self.iter_pages(): - for item in page._get_page_items(): - yield item - - def iter_pages(self: SyncPageT) -> Iterator[SyncPageT]: - page = self - while True: - yield page - if page.has_next_page(): - page = page.get_next_page() - else: - return - - def get_next_page(self: SyncPageT) -> SyncPageT: - info = self.next_page_info() - if not info: - raise RuntimeError( - "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
- ) - - options = self._info_to_options(info) - return self._client._request_api_list(self._model, page=self.__class__, options=options) - - -class AsyncPaginator(Generic[_T, AsyncPageT]): - def __init__( - self, - client: AsyncAPIClient, - options: FinalRequestOptions, - page_cls: Type[AsyncPageT], - model: Type[_T], - ) -> None: - self._model = model - self._client = client - self._options = options - self._page_cls = page_cls - - def __await__(self) -> Generator[Any, None, AsyncPageT]: - return self._get_page().__await__() - - async def _get_page(self) -> AsyncPageT: - def _parser(resp: AsyncPageT) -> AsyncPageT: - resp._set_private_attributes( - model=self._model, - options=self._options, - client=self._client, - ) - return resp - - self._options.post_parser = _parser - - return await self._client.request(self._page_cls, self._options) - - async def __aiter__(self) -> AsyncIterator[_T]: - # https://github.com/microsoft/pyright/issues/3464 - page = cast( - AsyncPageT, - await self, # type: ignore - ) - async for item in page: - yield item - - -class BaseAsyncPage(BasePage[_T], Generic[_T]): - _client: AsyncAPIClient = pydantic.PrivateAttr() - - def _set_private_attributes( - self, - model: Type[_T], - client: AsyncAPIClient, - options: FinalRequestOptions, - ) -> None: - self._model = model - self._client = client - self._options = options - - async def __aiter__(self) -> AsyncIterator[_T]: - async for page in self.iter_pages(): - for item in page._get_page_items(): - yield item - - async def iter_pages(self: AsyncPageT) -> AsyncIterator[AsyncPageT]: - page = self - while True: - yield page - if page.has_next_page(): - page = await page.get_next_page() - else: - return - - async def get_next_page(self: AsyncPageT) -> AsyncPageT: - info = self.next_page_info() - if not info: - raise RuntimeError( - "No next page expected; please check `.has_next_page()` before calling `.get_next_page()`." 
- ) - - options = self._info_to_options(info) - return await self._client._request_api_list(self._model, page=self.__class__, options=options) - - -_HttpxClientT = TypeVar("_HttpxClientT", bound=Union[httpx.Client, httpx.AsyncClient]) -_DefaultStreamT = TypeVar("_DefaultStreamT", bound=Union[Stream[Any], AsyncStream[Any]]) - - -class BaseClient(Generic[_HttpxClientT, _DefaultStreamT]): - _client: _HttpxClientT - _version: str - _base_url: URL - max_retries: int - timeout: Union[float, Timeout, None] - _limits: httpx.Limits - _proxies: ProxiesTypes | None - _transport: Transport | AsyncTransport | None - _strict_response_validation: bool - _idempotency_header: str | None - _default_stream_cls: type[_DefaultStreamT] | None = None - - def __init__( - self, - *, - version: str, - base_url: str | URL, - _strict_response_validation: bool, - max_retries: int = DEFAULT_MAX_RETRIES, - timeout: float | Timeout | None = DEFAULT_TIMEOUT, - limits: httpx.Limits, - transport: Transport | AsyncTransport | None, - proxies: ProxiesTypes | None, - custom_headers: Mapping[str, str] | None = None, - custom_query: Mapping[str, object] | None = None, - ) -> None: - self._version = version - self._base_url = self._enforce_trailing_slash(URL(base_url)) - self.max_retries = max_retries - self.timeout = timeout - self._limits = limits - self._proxies = proxies - self._transport = transport - self._custom_headers = custom_headers or {} - self._custom_query = custom_query or {} - self._strict_response_validation = _strict_response_validation - self._idempotency_header = None - self._platform: Platform | None = None - - if max_retries is None: # pyright: ignore[reportUnnecessaryComparison] - raise TypeError( - "max_retries cannot be None. If you want to disable retries, pass `0`; if you want unlimited retries, pass `math.inf` or a very high number; if you want the default behavior, pass `groq.DEFAULT_MAX_RETRIES`" - ) - - def _enforce_trailing_slash(self, url: URL) -> URL: - if url.raw_path.endswith(b"/"): - return url - return url.copy_with(raw_path=url.raw_path + b"/") - - def _make_status_error_from_response( - self, - response: httpx.Response, - ) -> APIStatusError: - if response.is_closed and not response.is_stream_consumed: - # We can't read the response body as it has been closed - # before it was read. This can happen if an event hook - # raises a status error. - body = None - err_msg = f"Error code: {response.status_code}" - else: - err_text = response.text.strip() - body = err_text - - try: - body = json.loads(err_text) - err_msg = f"Error code: {response.status_code} - {body}" - except Exception: - err_msg = err_text or f"Error code: {response.status_code}" - - return self._make_status_error(err_msg, body=body, response=response) - - def _make_status_error( - self, - err_msg: str, - *, - body: object, - response: httpx.Response, - ) -> _exceptions.APIStatusError: - raise NotImplementedError() - - def _remaining_retries( - self, - remaining_retries: Optional[int], - options: FinalRequestOptions, - ) -> int: - return remaining_retries if remaining_retries is not None else options.get_max_retries(self.max_retries) - - def _build_headers(self, options: FinalRequestOptions) -> httpx.Headers: - custom_headers = options.headers or {} - headers_dict = _merge_mappings(self.default_headers, custom_headers) - self._validate_headers(headers_dict, custom_headers) - - # headers are case-insensitive while dictionaries are not. 
- headers = httpx.Headers(headers_dict) - - idempotency_header = self._idempotency_header - if idempotency_header and options.method.lower() != "get" and idempotency_header not in headers: - headers[idempotency_header] = options.idempotency_key or self._idempotency_key() - - return headers - - def _prepare_url(self, url: str) -> URL: - """ - Merge a URL argument together with any 'base_url' on the client, - to create the URL used for the outgoing request. - """ - # Copied from httpx's `_merge_url` method. - merge_url = URL(url) - if merge_url.is_relative_url: - merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") - return self.base_url.copy_with(raw_path=merge_raw_path) - - return merge_url - - def _make_sse_decoder(self) -> SSEDecoder | SSEBytesDecoder: - return SSEDecoder() - - def _build_request( - self, - options: FinalRequestOptions, - ) -> httpx.Request: - if log.isEnabledFor(logging.DEBUG): - log.debug("Request options: %s", model_dump(options, exclude_unset=True)) - - kwargs: dict[str, Any] = {} - - json_data = options.json_data - if options.extra_json is not None: - if json_data is None: - json_data = cast(Body, options.extra_json) - elif is_mapping(json_data): - json_data = _merge_mappings(json_data, options.extra_json) - else: - raise RuntimeError(f"Unexpected JSON data type, {type(json_data)}, cannot merge with `extra_body`") - - headers = self._build_headers(options) - params = _merge_mappings(self.default_query, options.params) - content_type = headers.get("Content-Type") - files = options.files - - # If the given Content-Type header is multipart/form-data then it - # has to be removed so that httpx can generate the header with - # additional information for us as it has to be in this form - # for the server to be able to correctly parse the request: - # multipart/form-data; boundary=---abc-- - if content_type is not None and content_type.startswith("multipart/form-data"): - if "boundary" not in content_type: - # only remove the header if the boundary hasn't been explicitly set - # as the caller doesn't want httpx to come up with their own boundary - headers.pop("Content-Type") - - # As we are now sending multipart/form-data instead of application/json - # we need to tell httpx to use it, https://www.python-httpx.org/advanced/clients/#multipart-file-encoding - if json_data: - if not is_dict(json_data): - raise TypeError( - f"Expected query input to be a dictionary for multipart requests but got {type(json_data)} instead." - ) - kwargs["data"] = self._serialize_multipartform(json_data) - - # httpx determines whether or not to send a "multipart/form-data" - # request based on the truthiness of the "files" argument. - # This gets around that issue by generating a dict value that - # evaluates to true. - # - # https://github.com/encode/httpx/discussions/2399#discussioncomment-3814186 - if not files: - files = cast(HttpxRequestFiles, ForceMultipartDict()) - - # TODO: report this error to httpx - return self._client.build_request( # pyright: ignore[reportUnknownMemberType] - headers=headers, - timeout=self.timeout if isinstance(options.timeout, NotGiven) else options.timeout, - method=options.method, - url=self._prepare_url(options.url), - # the `Query` type that we use is incompatible with qs' - # `Params` type as it needs to be typed as `Mapping[str, object]` - # so that passing a `TypedDict` doesn't cause an error. 
- # https://github.com/microsoft/pyright/issues/3526#event-6715453066 - params=self.qs.stringify(cast(Mapping[str, Any], params)) if params else None, - json=json_data, - files=files, - **kwargs, - ) - - def _serialize_multipartform(self, data: Mapping[object, object]) -> dict[str, object]: - items = self.qs.stringify_items( - # TODO: type ignore is required as stringify_items is well typed but we can't be - # well typed without heavy validation. - data, # type: ignore - array_format="brackets", - ) - serialized: dict[str, object] = {} - for key, value in items: - existing = serialized.get(key) - - if not existing: - serialized[key] = value - continue - - # If a value has already been set for this key then that - # means we're sending data like `array[]=[1, 2, 3]` and we - # need to tell httpx that we want to send multiple values with - # the same key which is done by using a list or a tuple. - # - # Note: 2d arrays should never result in the same key at both - # levels so it's safe to assume that if the value is a list, - # it was because we changed it to be a list. - if is_list(existing): - existing.append(value) - else: - serialized[key] = [existing, value] - - return serialized - - def _maybe_override_cast_to(self, cast_to: type[ResponseT], options: FinalRequestOptions) -> type[ResponseT]: - if not is_given(options.headers): - return cast_to - - # make a copy of the headers so we don't mutate user-input - headers = dict(options.headers) - - # we internally support defining a temporary header to override the - # default `cast_to` type for use with `.with_raw_response` and `.with_streaming_response` - # see _response.py for implementation details - override_cast_to = headers.pop(OVERRIDE_CAST_TO_HEADER, NOT_GIVEN) - if is_given(override_cast_to): - options.headers = headers - return cast(Type[ResponseT], override_cast_to) - - return cast_to - - def _should_stream_response_body(self, request: httpx.Request) -> bool: - return request.headers.get(RAW_RESPONSE_HEADER) == "stream" # type: ignore[no-any-return] - - def _process_response_data( - self, - *, - data: object, - cast_to: type[ResponseT], - response: httpx.Response, - ) -> ResponseT: - if data is None: - return cast(ResponseT, None) - - if cast_to is object: - return cast(ResponseT, data) - - try: - if inspect.isclass(cast_to) and issubclass(cast_to, ModelBuilderProtocol): - return cast(ResponseT, cast_to.build(response=response, data=data)) - - if self._strict_response_validation: - return cast(ResponseT, validate_type(type_=cast_to, value=data)) - - return cast(ResponseT, construct_type(type_=cast_to, value=data)) - except pydantic.ValidationError as err: - raise APIResponseValidationError(response=response, body=data) from err - - @property - def qs(self) -> Querystring: - return Querystring() - - @property - def custom_auth(self) -> httpx.Auth | None: - return None - - @property - def auth_headers(self) -> dict[str, str]: - return {} - - @property - def default_headers(self) -> dict[str, str | Omit]: - return { - "Accept": "application/json", - "Content-Type": "application/json", - "User-Agent": self.user_agent, - **self.platform_headers(), - **self.auth_headers, - **self._custom_headers, - } - - @property - def default_query(self) -> dict[str, object]: - return { - **self._custom_query, - } - - def _validate_headers( - self, - headers: Headers, # noqa: ARG002 - custom_headers: Headers, # noqa: ARG002 - ) -> None: - """Validate the given default headers and custom headers. - - Does nothing by default. 
- """ - return - - @property - def user_agent(self) -> str: - return f"{self.__class__.__name__}/Python {self._version}" - - @property - def base_url(self) -> URL: - return self._base_url - - @base_url.setter - def base_url(self, url: URL | str) -> None: - self._base_url = self._enforce_trailing_slash(url if isinstance(url, URL) else URL(url)) - - def platform_headers(self) -> Dict[str, str]: - # the actual implementation is in a separate `lru_cache` decorated - # function because adding `lru_cache` to methods will leak memory - # https://github.com/python/cpython/issues/88476 - return platform_headers(self._version, platform=self._platform) - - def _parse_retry_after_header(self, response_headers: Optional[httpx.Headers] = None) -> float | None: - """Returns a float of the number of seconds (not milliseconds) to wait after retrying, or None if unspecified. - - About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After - See also https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After#syntax - """ - if response_headers is None: - return None - - # First, try the non-standard `retry-after-ms` header for milliseconds, - # which is more precise than integer-seconds `retry-after` - try: - retry_ms_header = response_headers.get("retry-after-ms", None) - return float(retry_ms_header) / 1000 - except (TypeError, ValueError): - pass - - # Next, try parsing `retry-after` header as seconds (allowing nonstandard floats). - retry_header = response_headers.get("retry-after") - try: - # note: the spec indicates that this should only ever be an integer - # but if someone sends a float there's no reason for us to not respect it - return float(retry_header) - except (TypeError, ValueError): - pass - - # Last, try parsing `retry-after` as a date. - retry_date_tuple = email.utils.parsedate_tz(retry_header) - if retry_date_tuple is None: - return None - - retry_date = email.utils.mktime_tz(retry_date_tuple) - return float(retry_date - time.time()) - - def _calculate_retry_timeout( - self, - remaining_retries: int, - options: FinalRequestOptions, - response_headers: Optional[httpx.Headers] = None, - ) -> float: - max_retries = options.get_max_retries(self.max_retries) - - # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says. - retry_after = self._parse_retry_after_header(response_headers) - if retry_after is not None and 0 < retry_after <= 60: - return retry_after - - nb_retries = max_retries - remaining_retries - - # Apply exponential backoff, but not more than the max. - sleep_seconds = min(INITIAL_RETRY_DELAY * pow(2.0, nb_retries), MAX_RETRY_DELAY) - - # Apply some jitter, plus-or-minus half a second. - jitter = 1 - 0.25 * random() - timeout = sleep_seconds * jitter - return timeout if timeout >= 0 else 0 - - def _should_retry(self, response: httpx.Response) -> bool: - # Note: this is not a standard header - should_retry_header = response.headers.get("x-should-retry") - - # If the server explicitly says whether or not to retry, obey. - if should_retry_header == "true": - log.debug("Retrying as header `x-should-retry` is set to `true`") - return True - if should_retry_header == "false": - log.debug("Not retrying as header `x-should-retry` is set to `false`") - return False - - # Retry on request timeouts. - if response.status_code == 408: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry on lock timeouts. 
- if response.status_code == 409: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry on rate limits. - if response.status_code == 429: - log.debug("Retrying due to status code %i", response.status_code) - return True - - # Retry internal errors. - if response.status_code >= 500: - log.debug("Retrying due to status code %i", response.status_code) - return True - - log.debug("Not retrying") - return False - - def _idempotency_key(self) -> str: - return f"stainless-python-retry-{uuid.uuid4()}" - - -class _DefaultHttpxClient(httpx.Client): - def __init__(self, **kwargs: Any) -> None: - kwargs.setdefault("timeout", DEFAULT_TIMEOUT) - kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) - kwargs.setdefault("follow_redirects", True) - super().__init__(**kwargs) - - -if TYPE_CHECKING: - DefaultHttpxClient = httpx.Client - """An alias to `httpx.Client` that provides the same defaults that this SDK - uses internally. - - This is useful because overriding the `http_client` with your own instance of - `httpx.Client` will result in httpx's defaults being used, not ours. - """ -else: - DefaultHttpxClient = _DefaultHttpxClient - - -class SyncHttpxClientWrapper(DefaultHttpxClient): - def __del__(self) -> None: - try: - self.close() - except Exception: - pass - - -class SyncAPIClient(BaseClient[httpx.Client, Stream[Any]]): - _client: httpx.Client - _default_stream_cls: type[Stream[Any]] | None = None - - def __init__( - self, - *, - version: str, - base_url: str | URL, - max_retries: int = DEFAULT_MAX_RETRIES, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, - transport: Transport | None = None, - proxies: ProxiesTypes | None = None, - limits: Limits | None = None, - http_client: httpx.Client | None = None, - custom_headers: Mapping[str, str] | None = None, - custom_query: Mapping[str, object] | None = None, - _strict_response_validation: bool, - ) -> None: - if limits is not None: - warnings.warn( - "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", - category=DeprecationWarning, - stacklevel=3, - ) - if http_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`") - else: - limits = DEFAULT_CONNECTION_LIMITS - - if transport is not None: - warnings.warn( - "The `transport` argument is deprecated. The `http_client` argument should be passed instead", - category=DeprecationWarning, - stacklevel=3, - ) - if http_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `transport`") - - if proxies is not None: - warnings.warn( - "The `proxies` argument is deprecated. The `http_client` argument should be passed instead", - category=DeprecationWarning, - stacklevel=3, - ) - if http_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `proxies`") - - if not is_given(timeout): - # if the user passed in a custom http client with a non-default - # timeout set then we use that timeout. 
- # - # note: there is an edge case here where the user passes in a client - # where they've explicitly set the timeout to match the default timeout - # as this check is structural, meaning that we'll think they didn't - # pass in a timeout and will ignore it - if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: - timeout = http_client.timeout - else: - timeout = DEFAULT_TIMEOUT - - if http_client is not None and not isinstance(http_client, httpx.Client): # pyright: ignore[reportUnnecessaryIsInstance] - raise TypeError( - f"Invalid `http_client` argument; Expected an instance of `httpx.Client` but got {type(http_client)}" - ) - - super().__init__( - version=version, - limits=limits, - # cast to a valid type because mypy doesn't understand our type narrowing - timeout=cast(Timeout, timeout), - proxies=proxies, - base_url=base_url, - transport=transport, - max_retries=max_retries, - custom_query=custom_query, - custom_headers=custom_headers, - _strict_response_validation=_strict_response_validation, - ) - self._client = http_client or SyncHttpxClientWrapper( - base_url=base_url, - # cast to a valid type because mypy doesn't understand our type narrowing - timeout=cast(Timeout, timeout), - proxies=proxies, - transport=transport, - limits=limits, - follow_redirects=True, - ) - - def is_closed(self) -> bool: - return self._client.is_closed - - def close(self) -> None: - """Close the underlying HTTPX client. - - The client will *not* be usable after this. - """ - # If an error is thrown while constructing a client, self._client - # may not be present - if hasattr(self, "_client"): - self._client.close() - - def __enter__(self: _T) -> _T: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() - - def _prepare_options( - self, - options: FinalRequestOptions, # noqa: ARG002 - ) -> FinalRequestOptions: - """Hook for mutating the given options""" - return options - - def _prepare_request( - self, - request: httpx.Request, # noqa: ARG002 - ) -> None: - """This method is used as a callback for mutating the `Request` object - after it has been constructed. - This is useful for cases where you want to add certain headers based off of - the request properties, e.g. `url`, `method` etc. - """ - return None - - @overload - def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - remaining_retries: Optional[int] = None, - *, - stream: Literal[True], - stream_cls: Type[_StreamT], - ) -> _StreamT: ... - - @overload - def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - remaining_retries: Optional[int] = None, - *, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - remaining_retries: Optional[int] = None, - *, - stream: bool = False, - stream_cls: Type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: ... 
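
The retry scheduling above first honors a server-supplied `Retry-After` when it is a reasonable value (between 0 and 60 seconds), preferring the non-standard millisecond header, and otherwise falls back to exponential backoff capped at a maximum delay and reduced by up to 25% multiplicative jitter. A minimal standalone sketch of that policy, reusing the delay constants defined in `groq/_constants.py` further down this diff; the HTTP-date branch of `Retry-After` parsing is omitted for brevity:

```python
from random import random
from typing import Mapping, Optional

INITIAL_RETRY_DELAY = 0.5  # seconds, matching groq/_constants.py later in this diff
MAX_RETRY_DELAY = 8.0      # seconds, likewise


def parse_retry_after(headers: Mapping[str, str]) -> Optional[float]:
    # Prefer the non-standard millisecond header, then integer/float seconds.
    for key, scale in (("retry-after-ms", 1000.0), ("retry-after", 1.0)):
        try:
            return float(headers[key]) / scale
        except (KeyError, TypeError, ValueError):
            continue
    return None


def retry_timeout(max_retries: int, remaining_retries: int, headers: Mapping[str, str]) -> float:
    retry_after = parse_retry_after(headers)
    if retry_after is not None and 0 < retry_after <= 60:
        return retry_after  # the server asked for a reasonable wait, so obey it

    nb_retries = max_retries - remaining_retries
    # Exponential backoff capped at the maximum delay...
    sleep_seconds = min(INITIAL_RETRY_DELAY * 2.0**nb_retries, MAX_RETRY_DELAY)
    # ...then reduced by up to 25% jitter so concurrent clients desynchronize.
    return sleep_seconds * (1 - 0.25 * random())


print(retry_timeout(2, 1, {}))                    # first retry: ~0.75-1.0 s
print(retry_timeout(2, 0, {"retry-after": "3"}))  # server override: 3.0 s
```

Note that this schedule only kicks in when `_should_retry` says so: an explicit `x-should-retry` header wins, then 408/409/429 and 5xx responses are retried.
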
- - def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - remaining_retries: Optional[int] = None, - *, - stream: bool = False, - stream_cls: type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: - return self._request( - cast_to=cast_to, - options=options, - stream=stream, - stream_cls=stream_cls, - remaining_retries=remaining_retries, - ) - - def _request( - self, - *, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - remaining_retries: int | None, - stream: bool, - stream_cls: type[_StreamT] | None, - ) -> ResponseT | _StreamT: - # create a copy of the options we were given so that if the - # options are mutated later & we then retry, the retries are - # given the original options - input_options = model_copy(options) - - cast_to = self._maybe_override_cast_to(cast_to, options) - options = self._prepare_options(options) - - retries = self._remaining_retries(remaining_retries, options) - request = self._build_request(options) - self._prepare_request(request) - - kwargs: HttpxSendArgs = {} - if self.custom_auth is not None: - kwargs["auth"] = self.custom_auth - - log.debug("Sending HTTP Request: %s %s", request.method, request.url) - - try: - response = self._client.send( - request, - stream=stream or self._should_stream_response_body(request=request), - **kwargs, - ) - except httpx.TimeoutException as err: - log.debug("Encountered httpx.TimeoutException", exc_info=True) - - if retries > 0: - return self._retry_request( - input_options, - cast_to, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising timeout error") - raise APITimeoutError(request=request) from err - except Exception as err: - log.debug("Encountered Exception", exc_info=True) - - if retries > 0: - return self._retry_request( - input_options, - cast_to, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising connection error") - raise APIConnectionError(request=request) from err - - log.debug( - 'HTTP Response: %s %s "%i %s" %s', - request.method, - request.url, - response.status_code, - response.reason_phrase, - response.headers, - ) - - try: - response.raise_for_status() - except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code - log.debug("Encountered httpx.HTTPStatusError", exc_info=True) - - if retries > 0 and self._should_retry(err.response): - err.response.close() - return self._retry_request( - input_options, - cast_to, - retries, - err.response.headers, - stream=stream, - stream_cls=stream_cls, - ) - - # If the response is streamed then we need to explicitly read the response - # to completion before attempting to access the response text. 
- if not err.response.is_closed: - err.response.read() - - log.debug("Re-raising status error") - raise self._make_status_error_from_response(err.response) from None - - return self._process_response( - cast_to=cast_to, - options=options, - response=response, - stream=stream, - stream_cls=stream_cls, - retries_taken=options.get_max_retries(self.max_retries) - retries, - ) - - def _retry_request( - self, - options: FinalRequestOptions, - cast_to: Type[ResponseT], - remaining_retries: int, - response_headers: httpx.Headers | None, - *, - stream: bool, - stream_cls: type[_StreamT] | None, - ) -> ResponseT | _StreamT: - remaining = remaining_retries - 1 - if remaining == 1: - log.debug("1 retry left") - else: - log.debug("%i retries left", remaining) - - timeout = self._calculate_retry_timeout(remaining, options, response_headers) - log.info("Retrying request to %s in %f seconds", options.url, timeout) - - # In a synchronous context we are blocking the entire thread. Up to the library user to run the client in a - # different thread if necessary. - time.sleep(timeout) - - return self._request( - options=options, - cast_to=cast_to, - remaining_retries=remaining, - stream=stream, - stream_cls=stream_cls, - ) - - def _process_response( - self, - *, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - response: httpx.Response, - stream: bool, - stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, - retries_taken: int = 0, - ) -> ResponseT: - origin = get_origin(cast_to) or cast_to - - if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): - if not issubclass(origin, APIResponse): - raise TypeError(f"API Response types must subclass {APIResponse}; Received {origin}") - - response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) - return cast( - ResponseT, - response_cls( - raw=response, - client=self, - cast_to=extract_response_type(response_cls), - stream=stream, - stream_cls=stream_cls, - options=options, - retries_taken=retries_taken, - ), - ) - - if cast_to == httpx.Response: - return cast(ResponseT, response) - - api_response = APIResponse( - raw=response, - client=self, - cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] - stream=stream, - stream_cls=stream_cls, - options=options, - retries_taken=retries_taken, - ) - if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): - return cast(ResponseT, api_response) - - return api_response.parse() - - def _request_api_list( - self, - model: Type[object], - page: Type[SyncPageT], - options: FinalRequestOptions, - ) -> SyncPageT: - def _parser(resp: SyncPageT) -> SyncPageT: - resp._set_private_attributes( - client=self, - model=model, - options=options, - ) - return resp - - options.post_parser = _parser - - return self.request(page, options, stream=False) - - @overload - def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: Literal[True], - stream_cls: type[_StreamT], - ) -> _StreamT: ... - - @overload - def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: bool, - stream_cls: type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: ... 
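
The `request` and `get` definitions above lean on a recurring three-way `@overload` scheme: `stream: Literal[True]` maps to the stream type, `Literal[False]` to the parsed response type, and plain `bool` to their union, with a single runtime implementation underneath. A reduced sketch of the pattern; `FakeStream` is a hypothetical stand-in for the SDK's `Stream` wrapper:

```python
from typing import Iterator, Literal, Union, overload


class FakeStream:
    """Stand-in for the SDK's Stream wrapper."""

    def __iter__(self) -> Iterator[str]:
        yield from ("chunk-1", "chunk-2")


@overload
def get(path: str, *, stream: Literal[True]) -> FakeStream: ...
@overload
def get(path: str, *, stream: Literal[False] = False) -> str: ...
@overload
def get(path: str, *, stream: bool) -> Union[str, FakeStream]: ...


def get(path: str, *, stream: bool = False) -> Union[str, FakeStream]:
    # Single runtime implementation; the overloads above exist purely for
    # type checkers, which narrow on the literal value of `stream`.
    return FakeStream() if stream else f"body of {path}"


body = get("/models")               # type checkers infer `str`
events = get("/chat", stream=True)  # type checkers infer `FakeStream`
print(body, list(events))
```

Compared with returning the union everywhere, the literal overloads spare every call site an `isinstance` check or cast when the streaming mode is known statically.
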
- - def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: bool = False, - stream_cls: type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: - opts = FinalRequestOptions.construct(method="get", url=path, **options) - # cast is required because mypy complains about returning Any even though - # it understands the type variables - return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) - - @overload - def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - files: RequestFiles | None = None, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - files: RequestFiles | None = None, - stream: Literal[True], - stream_cls: type[_StreamT], - ) -> _StreamT: ... - - @overload - def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - files: RequestFiles | None = None, - stream: bool, - stream_cls: type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: ... - - def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - files: RequestFiles | None = None, - stream: bool = False, - stream_cls: type[_StreamT] | None = None, - ) -> ResponseT | _StreamT: - opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=to_httpx_files(files), **options - ) - return cast(ResponseT, self.request(cast_to, opts, stream=stream, stream_cls=stream_cls)) - - def patch( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) - return self.request(cast_to, opts) - - def put( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - files: RequestFiles | None = None, - options: RequestOptions = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=to_httpx_files(files), **options - ) - return self.request(cast_to, opts) - - def delete( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) - return self.request(cast_to, opts) - - def get_api_list( - self, - path: str, - *, - model: Type[object], - page: Type[SyncPageT], - body: Body | None = None, - options: RequestOptions = {}, - method: str = "get", - ) -> SyncPageT: - opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) - return self._request_api_list(model, page, opts) - - -class _DefaultAsyncHttpxClient(httpx.AsyncClient): - def __init__(self, **kwargs: Any) -> None: - kwargs.setdefault("timeout", DEFAULT_TIMEOUT) - kwargs.setdefault("limits", DEFAULT_CONNECTION_LIMITS) - kwargs.setdefault("follow_redirects", True) - super().__init__(**kwargs) - - -if TYPE_CHECKING: - DefaultAsyncHttpxClient = httpx.AsyncClient - """An alias to `httpx.AsyncClient` that provides the same defaults that this SDK - uses internally. 
- - This is useful because overriding the `http_client` with your own instance of - `httpx.AsyncClient` will result in httpx's defaults being used, not ours. - """ -else: - DefaultAsyncHttpxClient = _DefaultAsyncHttpxClient - - -class AsyncHttpxClientWrapper(DefaultAsyncHttpxClient): - def __del__(self) -> None: - try: - # TODO(someday): support non asyncio runtimes here - asyncio.get_running_loop().create_task(self.aclose()) - except Exception: - pass - - -class AsyncAPIClient(BaseClient[httpx.AsyncClient, AsyncStream[Any]]): - _client: httpx.AsyncClient - _default_stream_cls: type[AsyncStream[Any]] | None = None - - def __init__( - self, - *, - version: str, - base_url: str | URL, - _strict_response_validation: bool, - max_retries: int = DEFAULT_MAX_RETRIES, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, - transport: AsyncTransport | None = None, - proxies: ProxiesTypes | None = None, - limits: Limits | None = None, - http_client: httpx.AsyncClient | None = None, - custom_headers: Mapping[str, str] | None = None, - custom_query: Mapping[str, object] | None = None, - ) -> None: - if limits is not None: - warnings.warn( - "The `connection_pool_limits` argument is deprecated. The `http_client` argument should be passed instead", - category=DeprecationWarning, - stacklevel=3, - ) - if http_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `connection_pool_limits`") - else: - limits = DEFAULT_CONNECTION_LIMITS - - if transport is not None: - warnings.warn( - "The `transport` argument is deprecated. The `http_client` argument should be passed instead", - category=DeprecationWarning, - stacklevel=3, - ) - if http_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `transport`") - - if proxies is not None: - warnings.warn( - "The `proxies` argument is deprecated. The `http_client` argument should be passed instead", - category=DeprecationWarning, - stacklevel=3, - ) - if http_client is not None: - raise ValueError("The `http_client` argument is mutually exclusive with `proxies`") - - if not is_given(timeout): - # if the user passed in a custom http client with a non-default - # timeout set then we use that timeout. 
- # - # note: there is an edge case here where the user passes in a client - # where they've explicitly set the timeout to match the default timeout - # as this check is structural, meaning that we'll think they didn't - # pass in a timeout and will ignore it - if http_client and http_client.timeout != HTTPX_DEFAULT_TIMEOUT: - timeout = http_client.timeout - else: - timeout = DEFAULT_TIMEOUT - - if http_client is not None and not isinstance(http_client, httpx.AsyncClient): # pyright: ignore[reportUnnecessaryIsInstance] - raise TypeError( - f"Invalid `http_client` argument; Expected an instance of `httpx.AsyncClient` but got {type(http_client)}" - ) - - super().__init__( - version=version, - base_url=base_url, - limits=limits, - # cast to a valid type because mypy doesn't understand our type narrowing - timeout=cast(Timeout, timeout), - proxies=proxies, - transport=transport, - max_retries=max_retries, - custom_query=custom_query, - custom_headers=custom_headers, - _strict_response_validation=_strict_response_validation, - ) - self._client = http_client or AsyncHttpxClientWrapper( - base_url=base_url, - # cast to a valid type because mypy doesn't understand our type narrowing - timeout=cast(Timeout, timeout), - proxies=proxies, - transport=transport, - limits=limits, - follow_redirects=True, - ) - - def is_closed(self) -> bool: - return self._client.is_closed - - async def close(self) -> None: - """Close the underlying HTTPX client. - - The client will *not* be usable after this. - """ - await self._client.aclose() - - async def __aenter__(self: _T) -> _T: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.close() - - async def _prepare_options( - self, - options: FinalRequestOptions, # noqa: ARG002 - ) -> FinalRequestOptions: - """Hook for mutating the given options""" - return options - - async def _prepare_request( - self, - request: httpx.Request, # noqa: ARG002 - ) -> None: - """This method is used as a callback for mutating the `Request` object - after it has been constructed. - This is useful for cases where you want to add certain headers based off of - the request properties, e.g. `url`, `method` etc. - """ - return None - - @overload - async def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - *, - stream: Literal[False] = False, - remaining_retries: Optional[int] = None, - ) -> ResponseT: ... - - @overload - async def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - *, - stream: Literal[True], - stream_cls: type[_AsyncStreamT], - remaining_retries: Optional[int] = None, - ) -> _AsyncStreamT: ... - - @overload - async def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - *, - stream: bool, - stream_cls: type[_AsyncStreamT] | None = None, - remaining_retries: Optional[int] = None, - ) -> ResponseT | _AsyncStreamT: ... 
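
One async-specific detail worth calling out: in `_request` below, the client resolves `get_platform()` through `asyncify` before sending anything, because collecting platform details can make blocking IO calls that would stall the event loop. A sketch of the same offloading idea using only the standard library, with `asyncio.to_thread` standing in for the SDK's `asyncify` helper:

```python
import asyncio
import platform


def get_platform_blocking() -> str:
    # platform.platform() may read files or shell out on some systems,
    # so it should not run directly on the event loop.
    return platform.platform()


async def main() -> None:
    # Offload the blocking call to a worker thread and await the result.
    name = await asyncio.to_thread(get_platform_blocking)
    print(f"resolved platform: {name}")


asyncio.run(main())
```
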
- - async def request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - *, - stream: bool = False, - stream_cls: type[_AsyncStreamT] | None = None, - remaining_retries: Optional[int] = None, - ) -> ResponseT | _AsyncStreamT: - return await self._request( - cast_to=cast_to, - options=options, - stream=stream, - stream_cls=stream_cls, - remaining_retries=remaining_retries, - ) - - async def _request( - self, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - *, - stream: bool, - stream_cls: type[_AsyncStreamT] | None, - remaining_retries: int | None, - ) -> ResponseT | _AsyncStreamT: - if self._platform is None: - # `get_platform` can make blocking IO calls so we - # execute it earlier while we are in an async context - self._platform = await asyncify(get_platform)() - - # create a copy of the options we were given so that if the - # options are mutated later & we then retry, the retries are - # given the original options - input_options = model_copy(options) - - cast_to = self._maybe_override_cast_to(cast_to, options) - options = await self._prepare_options(options) - - retries = self._remaining_retries(remaining_retries, options) - request = self._build_request(options) - await self._prepare_request(request) - - kwargs: HttpxSendArgs = {} - if self.custom_auth is not None: - kwargs["auth"] = self.custom_auth - - try: - response = await self._client.send( - request, - stream=stream or self._should_stream_response_body(request=request), - **kwargs, - ) - except httpx.TimeoutException as err: - log.debug("Encountered httpx.TimeoutException", exc_info=True) - - if retries > 0: - return await self._retry_request( - input_options, - cast_to, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising timeout error") - raise APITimeoutError(request=request) from err - except Exception as err: - log.debug("Encountered Exception", exc_info=True) - - if retries > 0: - return await self._retry_request( - input_options, - cast_to, - retries, - stream=stream, - stream_cls=stream_cls, - response_headers=None, - ) - - log.debug("Raising connection error") - raise APIConnectionError(request=request) from err - - log.debug( - 'HTTP Request: %s %s "%i %s"', request.method, request.url, response.status_code, response.reason_phrase - ) - - try: - response.raise_for_status() - except httpx.HTTPStatusError as err: # thrown on 4xx and 5xx status code - log.debug("Encountered httpx.HTTPStatusError", exc_info=True) - - if retries > 0 and self._should_retry(err.response): - await err.response.aclose() - return await self._retry_request( - input_options, - cast_to, - retries, - err.response.headers, - stream=stream, - stream_cls=stream_cls, - ) - - # If the response is streamed then we need to explicitly read the response - # to completion before attempting to access the response text. 
- if not err.response.is_closed: - await err.response.aread() - - log.debug("Re-raising status error") - raise self._make_status_error_from_response(err.response) from None - - return await self._process_response( - cast_to=cast_to, - options=options, - response=response, - stream=stream, - stream_cls=stream_cls, - retries_taken=options.get_max_retries(self.max_retries) - retries, - ) - - async def _retry_request( - self, - options: FinalRequestOptions, - cast_to: Type[ResponseT], - remaining_retries: int, - response_headers: httpx.Headers | None, - *, - stream: bool, - stream_cls: type[_AsyncStreamT] | None, - ) -> ResponseT | _AsyncStreamT: - remaining = remaining_retries - 1 - if remaining == 1: - log.debug("1 retry left") - else: - log.debug("%i retries left", remaining) - - timeout = self._calculate_retry_timeout(remaining, options, response_headers) - log.info("Retrying request to %s in %f seconds", options.url, timeout) - - await anyio.sleep(timeout) - - return await self._request( - options=options, - cast_to=cast_to, - remaining_retries=remaining, - stream=stream, - stream_cls=stream_cls, - ) - - async def _process_response( - self, - *, - cast_to: Type[ResponseT], - options: FinalRequestOptions, - response: httpx.Response, - stream: bool, - stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, - retries_taken: int = 0, - ) -> ResponseT: - origin = get_origin(cast_to) or cast_to - - if inspect.isclass(origin) and issubclass(origin, BaseAPIResponse): - if not issubclass(origin, AsyncAPIResponse): - raise TypeError(f"API Response types must subclass {AsyncAPIResponse}; Received {origin}") - - response_cls = cast("type[BaseAPIResponse[Any]]", cast_to) - return cast( - "ResponseT", - response_cls( - raw=response, - client=self, - cast_to=extract_response_type(response_cls), - stream=stream, - stream_cls=stream_cls, - options=options, - retries_taken=retries_taken, - ), - ) - - if cast_to == httpx.Response: - return cast(ResponseT, response) - - api_response = AsyncAPIResponse( - raw=response, - client=self, - cast_to=cast("type[ResponseT]", cast_to), # pyright: ignore[reportUnnecessaryCast] - stream=stream, - stream_cls=stream_cls, - options=options, - retries_taken=retries_taken, - ) - if bool(response.request.headers.get(RAW_RESPONSE_HEADER)): - return cast(ResponseT, api_response) - - return await api_response.parse() - - def _request_api_list( - self, - model: Type[_T], - page: Type[AsyncPageT], - options: FinalRequestOptions, - ) -> AsyncPaginator[_T, AsyncPageT]: - return AsyncPaginator(client=self, options=options, page_cls=page, model=model) - - @overload - async def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - async def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: Literal[True], - stream_cls: type[_AsyncStreamT], - ) -> _AsyncStreamT: ... - - @overload - async def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: bool, - stream_cls: type[_AsyncStreamT] | None = None, - ) -> ResponseT | _AsyncStreamT: ... 
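
Throughout these helpers, including `make_request_options` later in this file, "argument not passed" is kept distinct from "argument explicitly set to `None`" by the `NOT_GIVEN` sentinel imported from `groq/_types.py` and checked via `is_given`/`isinstance(..., NotGiven)`. A minimal sketch of the pattern; `resolve_timeout` and the re-created `NotGiven` here are illustrative, not the SDK's actual definitions:

```python
from typing import Optional, Union


class NotGiven:
    # Illustrative re-creation; the SDK's version lives in groq/_types.py.
    def __bool__(self) -> bool:
        return False

    def __repr__(self) -> str:
        return "NOT_GIVEN"


NOT_GIVEN = NotGiven()
DEFAULT_TIMEOUT = 60.0


def resolve_timeout(timeout: Union[float, None, NotGiven] = NOT_GIVEN) -> Optional[float]:
    if isinstance(timeout, NotGiven):
        return DEFAULT_TIMEOUT  # caller said nothing: inherit the default
    return timeout              # caller chose a value, possibly None (no timeout)


assert resolve_timeout() == 60.0      # omitted -> default
assert resolve_timeout(None) is None  # explicit None -> timeout disabled
assert resolve_timeout(2.5) == 2.5    # explicit value wins
```
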
- - async def get( - self, - path: str, - *, - cast_to: Type[ResponseT], - options: RequestOptions = {}, - stream: bool = False, - stream_cls: type[_AsyncStreamT] | None = None, - ) -> ResponseT | _AsyncStreamT: - opts = FinalRequestOptions.construct(method="get", url=path, **options) - return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) - - @overload - async def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - files: RequestFiles | None = None, - options: RequestOptions = {}, - stream: Literal[False] = False, - ) -> ResponseT: ... - - @overload - async def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - files: RequestFiles | None = None, - options: RequestOptions = {}, - stream: Literal[True], - stream_cls: type[_AsyncStreamT], - ) -> _AsyncStreamT: ... - - @overload - async def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - files: RequestFiles | None = None, - options: RequestOptions = {}, - stream: bool, - stream_cls: type[_AsyncStreamT] | None = None, - ) -> ResponseT | _AsyncStreamT: ... - - async def post( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - files: RequestFiles | None = None, - options: RequestOptions = {}, - stream: bool = False, - stream_cls: type[_AsyncStreamT] | None = None, - ) -> ResponseT | _AsyncStreamT: - opts = FinalRequestOptions.construct( - method="post", url=path, json_data=body, files=await async_to_httpx_files(files), **options - ) - return await self.request(cast_to, opts, stream=stream, stream_cls=stream_cls) - - async def patch( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="patch", url=path, json_data=body, **options) - return await self.request(cast_to, opts) - - async def put( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - files: RequestFiles | None = None, - options: RequestOptions = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct( - method="put", url=path, json_data=body, files=await async_to_httpx_files(files), **options - ) - return await self.request(cast_to, opts) - - async def delete( - self, - path: str, - *, - cast_to: Type[ResponseT], - body: Body | None = None, - options: RequestOptions = {}, - ) -> ResponseT: - opts = FinalRequestOptions.construct(method="delete", url=path, json_data=body, **options) - return await self.request(cast_to, opts) - - def get_api_list( - self, - path: str, - *, - model: Type[_T], - page: Type[AsyncPageT], - body: Body | None = None, - options: RequestOptions = {}, - method: str = "get", - ) -> AsyncPaginator[_T, AsyncPageT]: - opts = FinalRequestOptions.construct(method=method, url=path, json_data=body, **options) - return self._request_api_list(model, page, opts) - - -def make_request_options( - *, - query: Query | None = None, - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - idempotency_key: str | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - post_parser: PostParser | NotGiven = NOT_GIVEN, -) -> RequestOptions: - """Create a dict of type RequestOptions without keys of NotGiven values.""" - options: RequestOptions = {} - if extra_headers is not None: - options["headers"] = extra_headers - - if extra_body is not None: - options["extra_json"] = 
cast(AnyMapping, extra_body) - - if query is not None: - options["params"] = query - - if extra_query is not None: - options["params"] = {**options.get("params", {}), **extra_query} - - if not isinstance(timeout, NotGiven): - options["timeout"] = timeout - - if idempotency_key is not None: - options["idempotency_key"] = idempotency_key - - if is_given(post_parser): - # internal - options["post_parser"] = post_parser # type: ignore - - return options - - -class ForceMultipartDict(Dict[str, None]): - def __bool__(self) -> bool: - return True - - -class OtherPlatform: - def __init__(self, name: str) -> None: - self.name = name - - @override - def __str__(self) -> str: - return f"Other:{self.name}" - - -Platform = Union[ - OtherPlatform, - Literal[ - "MacOS", - "Linux", - "Windows", - "FreeBSD", - "OpenBSD", - "iOS", - "Android", - "Unknown", - ], -] - - -def get_platform() -> Platform: - try: - system = platform.system().lower() - platform_name = platform.platform().lower() - except Exception: - return "Unknown" - - if "iphone" in platform_name or "ipad" in platform_name: - # Tested using Python3IDE on an iPhone 11 and Pythonista on an iPad 7 - # system is Darwin and platform_name is a string like: - # - Darwin-21.6.0-iPhone12,1-64bit - # - Darwin-21.6.0-iPad7,11-64bit - return "iOS" - - if system == "darwin": - return "MacOS" - - if system == "windows": - return "Windows" - - if "android" in platform_name: - # Tested using Pydroid 3 - # system is Linux and platform_name is a string like 'Linux-5.10.81-android12-9-00001-geba40aecb3b7-ab8534902-aarch64-with-libc' - return "Android" - - if system == "linux": - # https://distro.readthedocs.io/en/latest/#distro.id - distro_id = distro.id() - if distro_id == "freebsd": - return "FreeBSD" - - if distro_id == "openbsd": - return "OpenBSD" - - return "Linux" - - if platform_name: - return OtherPlatform(platform_name) - - return "Unknown" - - -@lru_cache(maxsize=None) -def platform_headers(version: str, *, platform: Platform | None) -> Dict[str, str]: - return { - "X-Stainless-Lang": "python", - "X-Stainless-Package-Version": version, - "X-Stainless-OS": str(platform or get_platform()), - "X-Stainless-Arch": str(get_architecture()), - "X-Stainless-Runtime": get_python_runtime(), - "X-Stainless-Runtime-Version": get_python_version(), - } - - -class OtherArch: - def __init__(self, name: str) -> None: - self.name = name - - @override - def __str__(self) -> str: - return f"other:{self.name}" - - -Arch = Union[OtherArch, Literal["x32", "x64", "arm", "arm64", "unknown"]] - - -def get_python_runtime() -> str: - try: - return platform.python_implementation() - except Exception: - return "unknown" - - -def get_python_version() -> str: - try: - return platform.python_version() - except Exception: - return "unknown" - - -def get_architecture() -> Arch: - try: - machine = platform.machine().lower() - except Exception: - return "unknown" - - if machine in ("arm64", "aarch64"): - return "arm64" - - # TODO: untested - if machine == "arm": - return "arm" - - if machine == "x86_64": - return "x64" - - # TODO: untested - if sys.maxsize <= 2**32: - return "x32" - - if machine: - return OtherArch(machine) - - return "unknown" - - -def _merge_mappings( - obj1: Mapping[_T_co, Union[_T, Omit]], - obj2: Mapping[_T_co, Union[_T, Omit]], -) -> Dict[_T_co, _T]: - """Merge two mappings of the same type, removing any values that are instances of `Omit`. - - In cases with duplicate keys the second mapping takes precedence. 
- """ - merged = {**obj1, **obj2} - return {key: value for key, value in merged.items() if not isinstance(value, Omit)} diff --git a/myenv/Lib/site-packages/groq/_client.py b/myenv/Lib/site-packages/groq/_client.py deleted file mode 100644 index 87f6486..0000000 --- a/myenv/Lib/site-packages/groq/_client.py +++ /dev/null @@ -1,430 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -import os -from typing import Any, Union, Mapping -from typing_extensions import Self, override - -import httpx - -from . import resources, _exceptions -from ._qs import Querystring -from ._types import ( - NOT_GIVEN, - Omit, - Timeout, - NotGiven, - Transport, - ProxiesTypes, - RequestOptions, -) -from ._utils import ( - is_given, - get_async_library, -) -from ._version import __version__ -from ._streaming import Stream as Stream, AsyncStream as AsyncStream -from ._exceptions import GroqError, APIStatusError -from ._base_client import ( - DEFAULT_MAX_RETRIES, - SyncAPIClient, - AsyncAPIClient, -) - -__all__ = [ - "Timeout", - "Transport", - "ProxiesTypes", - "RequestOptions", - "resources", - "Groq", - "AsyncGroq", - "Client", - "AsyncClient", -] - - -class Groq(SyncAPIClient): - chat: resources.Chat - embeddings: resources.Embeddings - audio: resources.Audio - models: resources.Models - with_raw_response: GroqWithRawResponse - with_streaming_response: GroqWithStreamedResponse - - # client options - api_key: str - - def __init__( - self, - *, - api_key: str | None = None, - base_url: str | httpx.URL | None = None, - timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, - max_retries: int = DEFAULT_MAX_RETRIES, - default_headers: Mapping[str, str] | None = None, - default_query: Mapping[str, object] | None = None, - # Configure a custom httpx client. - # We provide a `DefaultHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. - # See the [httpx documentation](https://www.python-httpx.org/api/#client) for more details. - http_client: httpx.Client | None = None, - # Enable or disable schema validation for data returned by the API. - # When enabled an error APIResponseValidationError is raised - # if the API responds with invalid data for the expected schema. - # - # This parameter may be removed or changed in the future. - # If you rely on this feature, please open a GitHub issue - # outlining your use-case to help us decide if it should be - # part of our public interface in the future. - _strict_response_validation: bool = False, - ) -> None: - """Construct a new synchronous groq client instance. - - This automatically infers the `api_key` argument from the `GROQ_API_KEY` environment variable if it is not provided. 
- """ - if api_key is None: - api_key = os.environ.get("GROQ_API_KEY") - if api_key is None: - raise GroqError( - "The api_key client option must be set either by passing api_key to the client or by setting the GROQ_API_KEY environment variable" - ) - self.api_key = api_key - - if base_url is None: - base_url = os.environ.get("GROQ_BASE_URL") - if base_url is None: - base_url = f"https://api.groq.com" - - super().__init__( - version=__version__, - base_url=base_url, - max_retries=max_retries, - timeout=timeout, - http_client=http_client, - custom_headers=default_headers, - custom_query=default_query, - _strict_response_validation=_strict_response_validation, - ) - - self.chat = resources.Chat(self) - self.embeddings = resources.Embeddings(self) - self.audio = resources.Audio(self) - self.models = resources.Models(self) - self.with_raw_response = GroqWithRawResponse(self) - self.with_streaming_response = GroqWithStreamedResponse(self) - - @property - @override - def qs(self) -> Querystring: - return Querystring(array_format="comma") - - @property - @override - def auth_headers(self) -> dict[str, str]: - api_key = self.api_key - return {"Authorization": f"Bearer {api_key}"} - - @property - @override - def default_headers(self) -> dict[str, str | Omit]: - return { - **super().default_headers, - "X-Stainless-Async": "false", - **self._custom_headers, - } - - def copy( - self, - *, - api_key: str | None = None, - base_url: str | httpx.URL | None = None, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, - http_client: httpx.Client | None = None, - max_retries: int | NotGiven = NOT_GIVEN, - default_headers: Mapping[str, str] | None = None, - set_default_headers: Mapping[str, str] | None = None, - default_query: Mapping[str, object] | None = None, - set_default_query: Mapping[str, object] | None = None, - _extra_kwargs: Mapping[str, Any] = {}, - ) -> Self: - """ - Create a new client instance re-using the same options given to the current client with optional overriding. - """ - if default_headers is not None and set_default_headers is not None: - raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") - - if default_query is not None and set_default_query is not None: - raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") - - headers = self._custom_headers - if default_headers is not None: - headers = {**headers, **default_headers} - elif set_default_headers is not None: - headers = set_default_headers - - params = self._custom_query - if default_query is not None: - params = {**params, **default_query} - elif set_default_query is not None: - params = set_default_query - - http_client = http_client or self._client - return self.__class__( - api_key=api_key or self.api_key, - base_url=base_url or self.base_url, - timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, - http_client=http_client, - max_retries=max_retries if is_given(max_retries) else self.max_retries, - default_headers=headers, - default_query=params, - **_extra_kwargs, - ) - - # Alias for `copy` for nicer inline usage, e.g. - # client.with_options(timeout=10).foo.create(...) 
- with_options = copy - - @override - def _make_status_error( - self, - err_msg: str, - *, - body: object, - response: httpx.Response, - ) -> APIStatusError: - if response.status_code == 400: - return _exceptions.BadRequestError(err_msg, response=response, body=body) - - if response.status_code == 401: - return _exceptions.AuthenticationError(err_msg, response=response, body=body) - - if response.status_code == 403: - return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) - - if response.status_code == 404: - return _exceptions.NotFoundError(err_msg, response=response, body=body) - - if response.status_code == 409: - return _exceptions.ConflictError(err_msg, response=response, body=body) - - if response.status_code == 422: - return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) - - if response.status_code == 429: - return _exceptions.RateLimitError(err_msg, response=response, body=body) - - if response.status_code >= 500: - return _exceptions.InternalServerError(err_msg, response=response, body=body) - return APIStatusError(err_msg, response=response, body=body) - - -class AsyncGroq(AsyncAPIClient): - chat: resources.AsyncChat - embeddings: resources.AsyncEmbeddings - audio: resources.AsyncAudio - models: resources.AsyncModels - with_raw_response: AsyncGroqWithRawResponse - with_streaming_response: AsyncGroqWithStreamedResponse - - # client options - api_key: str - - def __init__( - self, - *, - api_key: str | None = None, - base_url: str | httpx.URL | None = None, - timeout: Union[float, Timeout, None, NotGiven] = NOT_GIVEN, - max_retries: int = DEFAULT_MAX_RETRIES, - default_headers: Mapping[str, str] | None = None, - default_query: Mapping[str, object] | None = None, - # Configure a custom httpx client. - # We provide a `DefaultAsyncHttpxClient` class that you can pass to retain the default values we use for `limits`, `timeout` & `follow_redirects`. - # See the [httpx documentation](https://www.python-httpx.org/api/#asyncclient) for more details. - http_client: httpx.AsyncClient | None = None, - # Enable or disable schema validation for data returned by the API. - # When enabled an error APIResponseValidationError is raised - # if the API responds with invalid data for the expected schema. - # - # This parameter may be removed or changed in the future. - # If you rely on this feature, please open a GitHub issue - # outlining your use-case to help us decide if it should be - # part of our public interface in the future. - _strict_response_validation: bool = False, - ) -> None: - """Construct a new async groq client instance. - - This automatically infers the `api_key` argument from the `GROQ_API_KEY` environment variable if it is not provided. 
- """ - if api_key is None: - api_key = os.environ.get("GROQ_API_KEY") - if api_key is None: - raise GroqError( - "The api_key client option must be set either by passing api_key to the client or by setting the GROQ_API_KEY environment variable" - ) - self.api_key = api_key - - if base_url is None: - base_url = os.environ.get("GROQ_BASE_URL") - if base_url is None: - base_url = f"https://api.groq.com" - - super().__init__( - version=__version__, - base_url=base_url, - max_retries=max_retries, - timeout=timeout, - http_client=http_client, - custom_headers=default_headers, - custom_query=default_query, - _strict_response_validation=_strict_response_validation, - ) - - self.chat = resources.AsyncChat(self) - self.embeddings = resources.AsyncEmbeddings(self) - self.audio = resources.AsyncAudio(self) - self.models = resources.AsyncModels(self) - self.with_raw_response = AsyncGroqWithRawResponse(self) - self.with_streaming_response = AsyncGroqWithStreamedResponse(self) - - @property - @override - def qs(self) -> Querystring: - return Querystring(array_format="comma") - - @property - @override - def auth_headers(self) -> dict[str, str]: - api_key = self.api_key - return {"Authorization": f"Bearer {api_key}"} - - @property - @override - def default_headers(self) -> dict[str, str | Omit]: - return { - **super().default_headers, - "X-Stainless-Async": f"async:{get_async_library()}", - **self._custom_headers, - } - - def copy( - self, - *, - api_key: str | None = None, - base_url: str | httpx.URL | None = None, - timeout: float | Timeout | None | NotGiven = NOT_GIVEN, - http_client: httpx.AsyncClient | None = None, - max_retries: int | NotGiven = NOT_GIVEN, - default_headers: Mapping[str, str] | None = None, - set_default_headers: Mapping[str, str] | None = None, - default_query: Mapping[str, object] | None = None, - set_default_query: Mapping[str, object] | None = None, - _extra_kwargs: Mapping[str, Any] = {}, - ) -> Self: - """ - Create a new client instance re-using the same options given to the current client with optional overriding. - """ - if default_headers is not None and set_default_headers is not None: - raise ValueError("The `default_headers` and `set_default_headers` arguments are mutually exclusive") - - if default_query is not None and set_default_query is not None: - raise ValueError("The `default_query` and `set_default_query` arguments are mutually exclusive") - - headers = self._custom_headers - if default_headers is not None: - headers = {**headers, **default_headers} - elif set_default_headers is not None: - headers = set_default_headers - - params = self._custom_query - if default_query is not None: - params = {**params, **default_query} - elif set_default_query is not None: - params = set_default_query - - http_client = http_client or self._client - return self.__class__( - api_key=api_key or self.api_key, - base_url=base_url or self.base_url, - timeout=self.timeout if isinstance(timeout, NotGiven) else timeout, - http_client=http_client, - max_retries=max_retries if is_given(max_retries) else self.max_retries, - default_headers=headers, - default_query=params, - **_extra_kwargs, - ) - - # Alias for `copy` for nicer inline usage, e.g. - # client.with_options(timeout=10).foo.create(...) 
- with_options = copy - - @override - def _make_status_error( - self, - err_msg: str, - *, - body: object, - response: httpx.Response, - ) -> APIStatusError: - if response.status_code == 400: - return _exceptions.BadRequestError(err_msg, response=response, body=body) - - if response.status_code == 401: - return _exceptions.AuthenticationError(err_msg, response=response, body=body) - - if response.status_code == 403: - return _exceptions.PermissionDeniedError(err_msg, response=response, body=body) - - if response.status_code == 404: - return _exceptions.NotFoundError(err_msg, response=response, body=body) - - if response.status_code == 409: - return _exceptions.ConflictError(err_msg, response=response, body=body) - - if response.status_code == 422: - return _exceptions.UnprocessableEntityError(err_msg, response=response, body=body) - - if response.status_code == 429: - return _exceptions.RateLimitError(err_msg, response=response, body=body) - - if response.status_code >= 500: - return _exceptions.InternalServerError(err_msg, response=response, body=body) - return APIStatusError(err_msg, response=response, body=body) - - -class GroqWithRawResponse: - def __init__(self, client: Groq) -> None: - self.chat = resources.ChatWithRawResponse(client.chat) - self.embeddings = resources.EmbeddingsWithRawResponse(client.embeddings) - self.audio = resources.AudioWithRawResponse(client.audio) - self.models = resources.ModelsWithRawResponse(client.models) - - -class AsyncGroqWithRawResponse: - def __init__(self, client: AsyncGroq) -> None: - self.chat = resources.AsyncChatWithRawResponse(client.chat) - self.embeddings = resources.AsyncEmbeddingsWithRawResponse(client.embeddings) - self.audio = resources.AsyncAudioWithRawResponse(client.audio) - self.models = resources.AsyncModelsWithRawResponse(client.models) - - -class GroqWithStreamedResponse: - def __init__(self, client: Groq) -> None: - self.chat = resources.ChatWithStreamingResponse(client.chat) - self.embeddings = resources.EmbeddingsWithStreamingResponse(client.embeddings) - self.audio = resources.AudioWithStreamingResponse(client.audio) - self.models = resources.ModelsWithStreamingResponse(client.models) - - -class AsyncGroqWithStreamedResponse: - def __init__(self, client: AsyncGroq) -> None: - self.chat = resources.AsyncChatWithStreamingResponse(client.chat) - self.embeddings = resources.AsyncEmbeddingsWithStreamingResponse(client.embeddings) - self.audio = resources.AsyncAudioWithStreamingResponse(client.audio) - self.models = resources.AsyncModelsWithStreamingResponse(client.models) - - -Client = Groq - -AsyncClient = AsyncGroq diff --git a/myenv/Lib/site-packages/groq/_compat.py b/myenv/Lib/site-packages/groq/_compat.py deleted file mode 100644 index 21fe694..0000000 --- a/myenv/Lib/site-packages/groq/_compat.py +++ /dev/null @@ -1,217 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Any, Union, Generic, TypeVar, Callable, cast, overload -from datetime import date, datetime -from typing_extensions import Self - -import pydantic -from pydantic.fields import FieldInfo - -from ._types import IncEx, StrBytesIntFloat - -_T = TypeVar("_T") -_ModelT = TypeVar("_ModelT", bound=pydantic.BaseModel) - -# --------------- Pydantic v2 compatibility --------------- - -# Pyright incorrectly reports some of our functions as overriding a method when they don't -# pyright: reportIncompatibleMethodOverride=false - -PYDANTIC_V2 = pydantic.VERSION.startswith("2.") - -# v1 re-exports -if TYPE_CHECKING: - - def parse_date(value: date 
| StrBytesIntFloat) -> date: # noqa: ARG001 - ... - - def parse_datetime(value: Union[datetime, StrBytesIntFloat]) -> datetime: # noqa: ARG001 - ... - - def get_args(t: type[Any]) -> tuple[Any, ...]: # noqa: ARG001 - ... - - def is_union(tp: type[Any] | None) -> bool: # noqa: ARG001 - ... - - def get_origin(t: type[Any]) -> type[Any] | None: # noqa: ARG001 - ... - - def is_literal_type(type_: type[Any]) -> bool: # noqa: ARG001 - ... - - def is_typeddict(type_: type[Any]) -> bool: # noqa: ARG001 - ... - -else: - if PYDANTIC_V2: - from pydantic.v1.typing import ( - get_args as get_args, - is_union as is_union, - get_origin as get_origin, - is_typeddict as is_typeddict, - is_literal_type as is_literal_type, - ) - from pydantic.v1.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime - else: - from pydantic.typing import ( - get_args as get_args, - is_union as is_union, - get_origin as get_origin, - is_typeddict as is_typeddict, - is_literal_type as is_literal_type, - ) - from pydantic.datetime_parse import parse_date as parse_date, parse_datetime as parse_datetime - - -# refactored config -if TYPE_CHECKING: - from pydantic import ConfigDict as ConfigDict -else: - if PYDANTIC_V2: - from pydantic import ConfigDict - else: - # TODO: provide an error message here? - ConfigDict = None - - -# renamed methods / properties -def parse_obj(model: type[_ModelT], value: object) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(value) - else: - return cast(_ModelT, model.parse_obj(value)) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - - -def field_is_required(field: FieldInfo) -> bool: - if PYDANTIC_V2: - return field.is_required() - return field.required # type: ignore - - -def field_get_default(field: FieldInfo) -> Any: - value = field.get_default() - if PYDANTIC_V2: - from pydantic_core import PydanticUndefined - - if value == PydanticUndefined: - return None - return value - return value - - -def field_outer_type(field: FieldInfo) -> Any: - if PYDANTIC_V2: - return field.annotation - return field.outer_type_ # type: ignore - - -def get_model_config(model: type[pydantic.BaseModel]) -> Any: - if PYDANTIC_V2: - return model.model_config - return model.__config__ # type: ignore - - -def get_model_fields(model: type[pydantic.BaseModel]) -> dict[str, FieldInfo]: - if PYDANTIC_V2: - return model.model_fields - return model.__fields__ # type: ignore - - -def model_copy(model: _ModelT, *, deep: bool = False) -> _ModelT: - if PYDANTIC_V2: - return model.model_copy(deep=deep) - return model.copy(deep=deep) # type: ignore - - -def model_json(model: pydantic.BaseModel, *, indent: int | None = None) -> str: - if PYDANTIC_V2: - return model.model_dump_json(indent=indent) - return model.json(indent=indent) # type: ignore - - -def model_dump( - model: pydantic.BaseModel, - *, - exclude: IncEx = None, - exclude_unset: bool = False, - exclude_defaults: bool = False, -) -> dict[str, Any]: - if PYDANTIC_V2: - return model.model_dump( - exclude=exclude, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - ) - return cast( - "dict[str, Any]", - model.dict( # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - exclude=exclude, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - ), - ) - - -def model_parse(model: type[_ModelT], data: Any) -> _ModelT: - if PYDANTIC_V2: - return model.model_validate(data) - return model.parse_obj(data) # pyright: ignore[reportDeprecated] - - -# generic models -if TYPE_CHECKING: - - class 
GenericModel(pydantic.BaseModel): ... - -else: - if PYDANTIC_V2: - # there no longer needs to be a distinction in v2 but - # we still have to create our own subclass to avoid - # inconsistent MRO ordering errors - class GenericModel(pydantic.BaseModel): ... - - else: - import pydantic.generics - - class GenericModel(pydantic.generics.GenericModel, pydantic.BaseModel): ... - - -# cached properties -if TYPE_CHECKING: - cached_property = property - - # we define a separate type (copied from typeshed) - # that represents that `cached_property` is `set`able - # at runtime, which differs from `@property`. - # - # this is a separate type as editors likely special case - # `@property` and we don't want to cause issues just to have - # more helpful internal types. - - class typed_cached_property(Generic[_T]): - func: Callable[[Any], _T] - attrname: str | None - - def __init__(self, func: Callable[[Any], _T]) -> None: ... - - @overload - def __get__(self, instance: None, owner: type[Any] | None = None) -> Self: ... - - @overload - def __get__(self, instance: object, owner: type[Any] | None = None) -> _T: ... - - def __get__(self, instance: object, owner: type[Any] | None = None) -> _T | Self: - raise NotImplementedError() - - def __set_name__(self, owner: type[Any], name: str) -> None: ... - - # __set__ is not defined at runtime, but @cached_property is designed to be settable - def __set__(self, instance: object, value: _T) -> None: ... -else: - try: - from functools import cached_property as cached_property - except ImportError: - from cached_property import cached_property as cached_property - - typed_cached_property = cached_property diff --git a/myenv/Lib/site-packages/groq/_constants.py b/myenv/Lib/site-packages/groq/_constants.py deleted file mode 100644 index a2ac3b6..0000000 --- a/myenv/Lib/site-packages/groq/_constants.py +++ /dev/null @@ -1,14 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -import httpx - -RAW_RESPONSE_HEADER = "X-Stainless-Raw-Response" -OVERRIDE_CAST_TO_HEADER = "____stainless_override_cast_to" - -# default timeout is 1 minute -DEFAULT_TIMEOUT = httpx.Timeout(timeout=60.0, connect=5.0) -DEFAULT_MAX_RETRIES = 2 -DEFAULT_CONNECTION_LIMITS = httpx.Limits(max_connections=100, max_keepalive_connections=20) - -INITIAL_RETRY_DELAY = 0.5 -MAX_RETRY_DELAY = 8.0 diff --git a/myenv/Lib/site-packages/groq/_exceptions.py b/myenv/Lib/site-packages/groq/_exceptions.py deleted file mode 100644 index ca69070..0000000 --- a/myenv/Lib/site-packages/groq/_exceptions.py +++ /dev/null @@ -1,108 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal - -import httpx - -__all__ = [ - "BadRequestError", - "AuthenticationError", - "PermissionDeniedError", - "NotFoundError", - "ConflictError", - "UnprocessableEntityError", - "RateLimitError", - "InternalServerError", -] - - -class GroqError(Exception): - pass - - -class APIError(GroqError): - message: str - request: httpx.Request - - body: object | None - """The API response body. - - If the API responded with a valid JSON structure then this property will be the - decoded result. - - If it isn't a valid JSON structure then this will be the raw response. - - If there was no response associated with this error then it will be `None`. 
- """ - - def __init__(self, message: str, request: httpx.Request, *, body: object | None) -> None: # noqa: ARG002 - super().__init__(message) - self.request = request - self.message = message - self.body = body - - -class APIResponseValidationError(APIError): - response: httpx.Response - status_code: int - - def __init__(self, response: httpx.Response, body: object | None, *, message: str | None = None) -> None: - super().__init__(message or "Data returned by API invalid for expected schema.", response.request, body=body) - self.response = response - self.status_code = response.status_code - - -class APIStatusError(APIError): - """Raised when an API response has a status code of 4xx or 5xx.""" - - response: httpx.Response - status_code: int - - def __init__(self, message: str, *, response: httpx.Response, body: object | None) -> None: - super().__init__(message, response.request, body=body) - self.response = response - self.status_code = response.status_code - - -class APIConnectionError(APIError): - def __init__(self, *, message: str = "Connection error.", request: httpx.Request) -> None: - super().__init__(message, request, body=None) - - -class APITimeoutError(APIConnectionError): - def __init__(self, request: httpx.Request) -> None: - super().__init__(message="Request timed out.", request=request) - - -class BadRequestError(APIStatusError): - status_code: Literal[400] = 400 # pyright: ignore[reportIncompatibleVariableOverride] - - -class AuthenticationError(APIStatusError): - status_code: Literal[401] = 401 # pyright: ignore[reportIncompatibleVariableOverride] - - -class PermissionDeniedError(APIStatusError): - status_code: Literal[403] = 403 # pyright: ignore[reportIncompatibleVariableOverride] - - -class NotFoundError(APIStatusError): - status_code: Literal[404] = 404 # pyright: ignore[reportIncompatibleVariableOverride] - - -class ConflictError(APIStatusError): - status_code: Literal[409] = 409 # pyright: ignore[reportIncompatibleVariableOverride] - - -class UnprocessableEntityError(APIStatusError): - status_code: Literal[422] = 422 # pyright: ignore[reportIncompatibleVariableOverride] - - -class RateLimitError(APIStatusError): - status_code: Literal[429] = 429 # pyright: ignore[reportIncompatibleVariableOverride] - - -class InternalServerError(APIStatusError): - pass diff --git a/myenv/Lib/site-packages/groq/_files.py b/myenv/Lib/site-packages/groq/_files.py deleted file mode 100644 index 715cc20..0000000 --- a/myenv/Lib/site-packages/groq/_files.py +++ /dev/null @@ -1,123 +0,0 @@ -from __future__ import annotations - -import io -import os -import pathlib -from typing import overload -from typing_extensions import TypeGuard - -import anyio - -from ._types import ( - FileTypes, - FileContent, - RequestFiles, - HttpxFileTypes, - Base64FileInput, - HttpxFileContent, - HttpxRequestFiles, -) -from ._utils import is_tuple_t, is_mapping_t, is_sequence_t - - -def is_base64_file_input(obj: object) -> TypeGuard[Base64FileInput]: - return isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) - - -def is_file_content(obj: object) -> TypeGuard[FileContent]: - return ( - isinstance(obj, bytes) or isinstance(obj, tuple) or isinstance(obj, io.IOBase) or isinstance(obj, os.PathLike) - ) - - -def assert_is_file_content(obj: object, *, key: str | None = None) -> None: - if not is_file_content(obj): - prefix = f"Expected entry at `{key}`" if key is not None else f"Expected file input `{obj!r}`" - raise RuntimeError( - f"{prefix} to be bytes, an io.IOBase instance, PathLike or a tuple but 
received {type(obj)} instead." - ) from None - - -@overload -def to_httpx_files(files: None) -> None: ... - - -@overload -def to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... - - -def to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: - if files is None: - return None - - if is_mapping_t(files): - files = {key: _transform_file(file) for key, file in files.items()} - elif is_sequence_t(files): - files = [(key, _transform_file(file)) for key, file in files] - else: - raise TypeError(f"Unexpected file type input {type(files)}, expected mapping or sequence") - - return files - - -def _transform_file(file: FileTypes) -> HttpxFileTypes: - if is_file_content(file): - if isinstance(file, os.PathLike): - path = pathlib.Path(file) - return (path.name, path.read_bytes()) - - return file - - if is_tuple_t(file): - return (file[0], _read_file_content(file[1]), *file[2:]) - - raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") - - -def _read_file_content(file: FileContent) -> HttpxFileContent: - if isinstance(file, os.PathLike): - return pathlib.Path(file).read_bytes() - return file - - -@overload -async def async_to_httpx_files(files: None) -> None: ... - - -@overload -async def async_to_httpx_files(files: RequestFiles) -> HttpxRequestFiles: ... - - -async def async_to_httpx_files(files: RequestFiles | None) -> HttpxRequestFiles | None: - if files is None: - return None - - if is_mapping_t(files): - files = {key: await _async_transform_file(file) for key, file in files.items()} - elif is_sequence_t(files): - files = [(key, await _async_transform_file(file)) for key, file in files] - else: - raise TypeError("Unexpected file type input {type(files)}, expected mapping or sequence") - - return files - - -async def _async_transform_file(file: FileTypes) -> HttpxFileTypes: - if is_file_content(file): - if isinstance(file, os.PathLike): - path = anyio.Path(file) - return (path.name, await path.read_bytes()) - - return file - - if is_tuple_t(file): - return (file[0], await _async_read_file_content(file[1]), *file[2:]) - - raise TypeError(f"Expected file types input to be a FileContent type or to be a tuple") - - -async def _async_read_file_content(file: FileContent) -> HttpxFileContent: - if isinstance(file, os.PathLike): - return await anyio.Path(file).read_bytes() - - return file diff --git a/myenv/Lib/site-packages/groq/_models.py b/myenv/Lib/site-packages/groq/_models.py deleted file mode 100644 index d386eaa..0000000 --- a/myenv/Lib/site-packages/groq/_models.py +++ /dev/null @@ -1,785 +0,0 @@ -from __future__ import annotations - -import os -import inspect -from typing import TYPE_CHECKING, Any, Type, Union, Generic, TypeVar, Callable, cast -from datetime import date, datetime -from typing_extensions import ( - Unpack, - Literal, - ClassVar, - Protocol, - Required, - ParamSpec, - TypedDict, - TypeGuard, - final, - override, - runtime_checkable, -) - -import pydantic -import pydantic.generics -from pydantic.fields import FieldInfo - -from ._types import ( - Body, - IncEx, - Query, - ModelT, - Headers, - Timeout, - NotGiven, - AnyMapping, - HttpxRequestFiles, -) -from ._utils import ( - PropertyInfo, - is_list, - is_given, - lru_cache, - is_mapping, - parse_date, - coerce_boolean, - parse_datetime, - strip_not_given, - extract_type_arg, - is_annotated_type, - strip_annotated_type, -) -from ._compat import ( - PYDANTIC_V2, - ConfigDict, - GenericModel as BaseGenericModel, - get_args, - is_union, - parse_obj, - get_origin, - 
is_literal_type, - get_model_config, - get_model_fields, - field_get_default, -) -from ._constants import RAW_RESPONSE_HEADER - -if TYPE_CHECKING: - from pydantic_core.core_schema import ModelField, LiteralSchema, ModelFieldsSchema - -__all__ = ["BaseModel", "GenericModel"] - -_T = TypeVar("_T") -_BaseModelT = TypeVar("_BaseModelT", bound="BaseModel") - -P = ParamSpec("P") - - -@runtime_checkable -class _ConfigProtocol(Protocol): - allow_population_by_field_name: bool - - -class BaseModel(pydantic.BaseModel): - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict( - extra="allow", defer_build=coerce_boolean(os.environ.get("DEFER_PYDANTIC_BUILD", "true")) - ) - else: - - @property - @override - def model_fields_set(self) -> set[str]: - # a forwards-compat shim for pydantic v2 - return self.__fields_set__ # type: ignore - - class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] - extra: Any = pydantic.Extra.allow # type: ignore - - def to_dict( - self, - *, - mode: Literal["json", "python"] = "python", - use_api_names: bool = True, - exclude_unset: bool = True, - exclude_defaults: bool = False, - exclude_none: bool = False, - warnings: bool = True, - ) -> dict[str, object]: - """Recursively generate a dictionary representation of the model, optionally specifying which fields to include or exclude. - - By default, fields that were not set by the API will not be included, - and keys will match the API response, *not* the property names from the model. - - For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, - the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). - - Args: - mode: - If mode is 'json', the dictionary will only contain JSON serializable types. e.g. `datetime` will be turned into a string, `"2024-3-22T18:11:19.117000Z"`. - If mode is 'python', the dictionary may contain any Python objects. e.g. `datetime(2024, 3, 22)` - - use_api_names: Whether to use the key that the API responded with or the property name. Defaults to `True`. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that are set to their default value from the output. - exclude_none: Whether to exclude fields that have a value of `None` from the output. - warnings: Whether to log warnings when invalid fields are encountered. This is only supported in Pydantic v2. - """ - return self.model_dump( - mode=mode, - by_alias=use_api_names, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - warnings=warnings, - ) - - def to_json( - self, - *, - indent: int | None = 2, - use_api_names: bool = True, - exclude_unset: bool = True, - exclude_defaults: bool = False, - exclude_none: bool = False, - warnings: bool = True, - ) -> str: - """Generates a JSON string representing this model as it would be received from or sent to the API (but with indentation). - - By default, fields that were not set by the API will not be included, - and keys will match the API response, *not* the property names from the model. - - For example, if the API responds with `"fooBar": true` but we've defined a `foo_bar: bool` property, - the output will use the `"fooBar"` key (unless `use_api_names=False` is passed). - - Args: - indent: Indentation to use in the JSON output. If `None` is passed, the output will be compact. Defaults to `2` - use_api_names: Whether to use the key that the API responded with or the property name. 
Defaults to `True`. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that have the default value. - exclude_none: Whether to exclude fields that have a value of `None`. - warnings: Whether to show any warnings that occurred during serialization. This is only supported in Pydantic v2. - """ - return self.model_dump_json( - indent=indent, - by_alias=use_api_names, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - warnings=warnings, - ) - - @override - def __str__(self) -> str: - # mypy complains about an invalid self arg - return f'{self.__repr_name__()}({self.__repr_str__(", ")})' # type: ignore[misc] - - # Override the 'construct' method in a way that supports recursive parsing without validation. - # Based on https://github.com/samuelcolvin/pydantic/issues/1168#issuecomment-817742836. - @classmethod - @override - def construct( - cls: Type[ModelT], - _fields_set: set[str] | None = None, - **values: object, - ) -> ModelT: - m = cls.__new__(cls) - fields_values: dict[str, object] = {} - - config = get_model_config(cls) - populate_by_name = ( - config.allow_population_by_field_name - if isinstance(config, _ConfigProtocol) - else config.get("populate_by_name") - ) - - if _fields_set is None: - _fields_set = set() - - model_fields = get_model_fields(cls) - for name, field in model_fields.items(): - key = field.alias - if key is None or (key not in values and populate_by_name): - key = name - - if key in values: - fields_values[name] = _construct_field(value=values[key], field=field, key=key) - _fields_set.add(name) - else: - fields_values[name] = field_get_default(field) - - _extra = {} - for key, value in values.items(): - if key not in model_fields: - if PYDANTIC_V2: - _extra[key] = value - else: - _fields_set.add(key) - fields_values[key] = value - - object.__setattr__(m, "__dict__", fields_values) - - if PYDANTIC_V2: - # these properties are copied from Pydantic's `model_construct()` method - object.__setattr__(m, "__pydantic_private__", None) - object.__setattr__(m, "__pydantic_extra__", _extra) - object.__setattr__(m, "__pydantic_fields_set__", _fields_set) - else: - # init_private_attributes() does not exist in v2 - m._init_private_attributes() # type: ignore - - # copied from Pydantic v1's `construct()` method - object.__setattr__(m, "__fields_set__", _fields_set) - - return m - - if not TYPE_CHECKING: - # type checkers incorrectly complain about this assignment - # because the type signatures are technically different - # although not in practice - model_construct = construct - - if not PYDANTIC_V2: - # we define aliases for some of the new pydantic v2 methods so - # that we can just document these methods without having to specify - # a specific pydantic version as some users may not know which - # pydantic version they are currently using - - @override - def model_dump( - self, - *, - mode: Literal["json", "python"] | str = "python", - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, - serialize_as_any: bool = False, - ) -> dict[str, Any]: - """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump - - Generate a dictionary representation of the model, optionally specifying which fields to 
include or exclude. - - Args: - mode: The mode in which `to_python` should run. - If mode is 'json', the dictionary will only contain JSON serializable types. - If mode is 'python', the dictionary may contain any Python objects. - include: A list of fields to include in the output. - exclude: A list of fields to exclude from the output. - by_alias: Whether to use the field's alias in the dictionary key if defined. - exclude_unset: Whether to exclude fields that are unset or None from the output. - exclude_defaults: Whether to exclude fields that are set to their default value from the output. - exclude_none: Whether to exclude fields that have a value of `None` from the output. - round_trip: Whether to enable serialization and deserialization round-trip support. - warnings: Whether to log warnings when invalid fields are encountered. - - Returns: - A dictionary representation of the model. - """ - if mode != "python": - raise ValueError("mode is only supported in Pydantic v2") - if round_trip != False: - raise ValueError("round_trip is only supported in Pydantic v2") - if warnings != True: - raise ValueError("warnings is only supported in Pydantic v2") - if context is not None: - raise ValueError("context is only supported in Pydantic v2") - if serialize_as_any != False: - raise ValueError("serialize_as_any is only supported in Pydantic v2") - return super().dict( # pyright: ignore[reportDeprecated] - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - @override - def model_dump_json( - self, - *, - indent: int | None = None, - include: IncEx = None, - exclude: IncEx = None, - by_alias: bool = False, - exclude_unset: bool = False, - exclude_defaults: bool = False, - exclude_none: bool = False, - round_trip: bool = False, - warnings: bool | Literal["none", "warn", "error"] = True, - context: dict[str, Any] | None = None, - serialize_as_any: bool = False, - ) -> str: - """Usage docs: https://docs.pydantic.dev/2.4/concepts/serialization/#modelmodel_dump_json - - Generates a JSON representation of the model using Pydantic's `to_json` method. - - Args: - indent: Indentation to use in the JSON output. If None is passed, the output will be compact. - include: Field(s) to include in the JSON output. Can take either a string or set of strings. - exclude: Field(s) to exclude from the JSON output. Can take either a string or set of strings. - by_alias: Whether to serialize using field aliases. - exclude_unset: Whether to exclude fields that have not been explicitly set. - exclude_defaults: Whether to exclude fields that have the default value. - exclude_none: Whether to exclude fields that have a value of `None`. - round_trip: Whether to use serialization/deserialization between JSON and class instance. - warnings: Whether to show any warnings that occurred during serialization. - - Returns: - A JSON string representation of the model. 
- """ - if round_trip != False: - raise ValueError("round_trip is only supported in Pydantic v2") - if warnings != True: - raise ValueError("warnings is only supported in Pydantic v2") - if context is not None: - raise ValueError("context is only supported in Pydantic v2") - if serialize_as_any != False: - raise ValueError("serialize_as_any is only supported in Pydantic v2") - return super().json( # type: ignore[reportDeprecated] - indent=indent, - include=include, - exclude=exclude, - by_alias=by_alias, - exclude_unset=exclude_unset, - exclude_defaults=exclude_defaults, - exclude_none=exclude_none, - ) - - -def _construct_field(value: object, field: FieldInfo, key: str) -> object: - if value is None: - return field_get_default(field) - - if PYDANTIC_V2: - type_ = field.annotation - else: - type_ = cast(type, field.outer_type_) # type: ignore - - if type_ is None: - raise RuntimeError(f"Unexpected field type is None for {key}") - - return construct_type(value=value, type_=type_) - - -def is_basemodel(type_: type) -> bool: - """Returns whether or not the given type is either a `BaseModel` or a union of `BaseModel`""" - if is_union(type_): - for variant in get_args(type_): - if is_basemodel(variant): - return True - - return False - - return is_basemodel_type(type_) - - -def is_basemodel_type(type_: type) -> TypeGuard[type[BaseModel] | type[GenericModel]]: - origin = get_origin(type_) or type_ - if not inspect.isclass(origin): - return False - return issubclass(origin, BaseModel) or issubclass(origin, GenericModel) - - -def build( - base_model_cls: Callable[P, _BaseModelT], - *args: P.args, - **kwargs: P.kwargs, -) -> _BaseModelT: - """Construct a BaseModel class without validation. - - This is useful for cases where you need to instantiate a `BaseModel` - from an API response as this provides type-safe params which isn't supported - by helpers like `construct_type()`. - - ```py - build(MyModel, my_field_a="foo", my_field_b=123) - ``` - """ - if args: - raise TypeError( - "Received positional arguments which are not supported; Keyword arguments must be used instead", - ) - - return cast(_BaseModelT, construct_type(type_=base_model_cls, value=kwargs)) - - -def construct_type_unchecked(*, value: object, type_: type[_T]) -> _T: - """Loose coercion to the expected type with construction of nested values. - - Note: the returned value from this function is not guaranteed to match the - given type. - """ - return cast(_T, construct_type(value=value, type_=type_)) - - -def construct_type(*, value: object, type_: object) -> object: - """Loose coercion to the expected type with construction of nested values. - - If the given value does not match the expected type then it is returned as-is. - """ - # we allow `object` as the input type because otherwise, passing things like - # `Literal['value']` will be reported as a type error by type checkers - type_ = cast("type[object]", type_) - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(type_): - meta: tuple[Any, ...] = get_args(type_)[1:] - type_ = extract_type_arg(type_, 0) - else: - meta = tuple() - - # we need to use the origin class for any types that are subscripted generics - # e.g. 
Dict[str, object] - origin = get_origin(type_) or type_ - args = get_args(type_) - - if is_union(origin): - try: - return validate_type(type_=cast("type[object]", type_), value=value) - except Exception: - pass - - # if the type is a discriminated union then we want to construct the right variant - # in the union, even if the data doesn't match exactly, otherwise we'd break code - # that relies on the constructed class types, e.g. - # - # class FooType: - # kind: Literal['foo'] - # value: str - # - # class BarType: - # kind: Literal['bar'] - # value: int - # - # without this block, if the data we get is something like `{'kind': 'bar', 'value': 'foo'}` then - # we'd end up constructing `FooType` when it should be `BarType`. - discriminator = _build_discriminated_union_meta(union=type_, meta_annotations=meta) - if discriminator and is_mapping(value): - variant_value = value.get(discriminator.field_alias_from or discriminator.field_name) - if variant_value and isinstance(variant_value, str): - variant_type = discriminator.mapping.get(variant_value) - if variant_type: - return construct_type(type_=variant_type, value=value) - - # if the data is not valid, use the first variant that doesn't fail while deserializing - for variant in args: - try: - return construct_type(value=value, type_=variant) - except Exception: - continue - - raise RuntimeError(f"Could not convert data into a valid instance of {type_}") - - if origin == dict: - if not is_mapping(value): - return value - - _, items_type = get_args(type_) # Dict[_, items_type] - return {key: construct_type(value=item, type_=items_type) for key, item in value.items()} - - if not is_literal_type(type_) and (issubclass(origin, BaseModel) or issubclass(origin, GenericModel)): - if is_list(value): - return [cast(Any, type_).construct(**entry) if is_mapping(entry) else entry for entry in value] - - if is_mapping(value): - if issubclass(type_, BaseModel): - return type_.construct(**value) # type: ignore[arg-type] - - return cast(Any, type_).construct(**value) - - if origin == list: - if not is_list(value): - return value - - inner_type = args[0] # List[inner_type] - return [construct_type(value=entry, type_=inner_type) for entry in value] - - if origin == float: - if isinstance(value, int): - coerced = float(value) - if coerced != value: - return value - return coerced - - return value - - if type_ == datetime: - try: - return parse_datetime(value) # type: ignore - except Exception: - return value - - if type_ == date: - try: - return parse_date(value) # type: ignore - except Exception: - return value - - return value - - -@runtime_checkable -class CachedDiscriminatorType(Protocol): - __discriminator__: DiscriminatorDetails - - -class DiscriminatorDetails: - field_name: str - """The name of the discriminator field in the variant class, e.g. - - ```py - class Foo(BaseModel): - type: Literal['foo'] - ``` - - Will result in field_name='type' - """ - - field_alias_from: str | None - """The name of the discriminator field in the API response, e.g. - - ```py - class Foo(BaseModel): - type: Literal['foo'] = Field(alias='type_from_api') - ``` - - Will result in field_alias_from='type_from_api' - """ - - mapping: dict[str, type] - """Mapping of discriminator value to variant type, e.g. 
- - {'foo': FooVariant, 'bar': BarVariant} - """ - - def __init__( - self, - *, - mapping: dict[str, type], - discriminator_field: str, - discriminator_alias: str | None, - ) -> None: - self.mapping = mapping - self.field_name = discriminator_field - self.field_alias_from = discriminator_alias - - -def _build_discriminated_union_meta(*, union: type, meta_annotations: tuple[Any, ...]) -> DiscriminatorDetails | None: - if isinstance(union, CachedDiscriminatorType): - return union.__discriminator__ - - discriminator_field_name: str | None = None - - for annotation in meta_annotations: - if isinstance(annotation, PropertyInfo) and annotation.discriminator is not None: - discriminator_field_name = annotation.discriminator - break - - if not discriminator_field_name: - return None - - mapping: dict[str, type] = {} - discriminator_alias: str | None = None - - for variant in get_args(union): - variant = strip_annotated_type(variant) - if is_basemodel_type(variant): - if PYDANTIC_V2: - field = _extract_field_schema_pv2(variant, discriminator_field_name) - if not field: - continue - - # Note: if one variant defines an alias then they all should - discriminator_alias = field.get("serialization_alias") - - field_schema = field["schema"] - - if field_schema["type"] == "literal": - for entry in cast("LiteralSchema", field_schema)["expected"]: - if isinstance(entry, str): - mapping[entry] = variant - else: - field_info = cast("dict[str, FieldInfo]", variant.__fields__).get(discriminator_field_name) # pyright: ignore[reportDeprecated, reportUnnecessaryCast] - if not field_info: - continue - - # Note: if one variant defines an alias then they all should - discriminator_alias = field_info.alias - - if field_info.annotation and is_literal_type(field_info.annotation): - for entry in get_args(field_info.annotation): - if isinstance(entry, str): - mapping[entry] = variant - - if not mapping: - return None - - details = DiscriminatorDetails( - mapping=mapping, - discriminator_field=discriminator_field_name, - discriminator_alias=discriminator_alias, - ) - cast(CachedDiscriminatorType, union).__discriminator__ = details - return details - - -def _extract_field_schema_pv2(model: type[BaseModel], field_name: str) -> ModelField | None: - schema = model.__pydantic_core_schema__ - if schema["type"] != "model": - return None - - fields_schema = schema["schema"] - if fields_schema["type"] != "model-fields": - return None - - fields_schema = cast("ModelFieldsSchema", fields_schema) - - field = fields_schema["fields"].get(field_name) - if not field: - return None - - return cast("ModelField", field) # pyright: ignore[reportUnnecessaryCast] - - -def validate_type(*, type_: type[_T], value: object) -> _T: - """Strict validation that the given value matches the expected type""" - if inspect.isclass(type_) and issubclass(type_, pydantic.BaseModel): - return cast(_T, parse_obj(type_, value)) - - return cast(_T, _validate_non_model_type(type_=type_, value=value)) - - -def set_pydantic_config(typ: Any, config: pydantic.ConfigDict) -> None: - """Add a pydantic config for the given type. - - Note: this is a no-op on Pydantic v1. 
- """ - setattr(typ, "__pydantic_config__", config) # noqa: B010 - - -# our use of subclasssing here causes weirdness for type checkers, -# so we just pretend that we don't subclass -if TYPE_CHECKING: - GenericModel = BaseModel -else: - - class GenericModel(BaseGenericModel, BaseModel): - pass - - -if PYDANTIC_V2: - from pydantic import TypeAdapter as _TypeAdapter - - _CachedTypeAdapter = cast("TypeAdapter[object]", lru_cache(maxsize=None)(_TypeAdapter)) - - if TYPE_CHECKING: - from pydantic import TypeAdapter - else: - TypeAdapter = _CachedTypeAdapter - - def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: - return TypeAdapter(type_).validate_python(value) - -elif not TYPE_CHECKING: # TODO: condition is weird - - class RootModel(GenericModel, Generic[_T]): - """Used as a placeholder to easily convert runtime types to a Pydantic format - to provide validation. - - For example: - ```py - validated = RootModel[int](__root__="5").__root__ - # validated: 5 - ``` - """ - - __root__: _T - - def _validate_non_model_type(*, type_: type[_T], value: object) -> _T: - model = _create_pydantic_model(type_).validate(value) - return cast(_T, model.__root__) - - def _create_pydantic_model(type_: _T) -> Type[RootModel[_T]]: - return RootModel[type_] # type: ignore - - -class FinalRequestOptionsInput(TypedDict, total=False): - method: Required[str] - url: Required[str] - params: Query - headers: Headers - max_retries: int - timeout: float | Timeout | None - files: HttpxRequestFiles | None - idempotency_key: str - json_data: Body - extra_json: AnyMapping - - -@final -class FinalRequestOptions(pydantic.BaseModel): - method: str - url: str - params: Query = {} - headers: Union[Headers, NotGiven] = NotGiven() - max_retries: Union[int, NotGiven] = NotGiven() - timeout: Union[float, Timeout, None, NotGiven] = NotGiven() - files: Union[HttpxRequestFiles, None] = None - idempotency_key: Union[str, None] = None - post_parser: Union[Callable[[Any], Any], NotGiven] = NotGiven() - - # It should be noted that we cannot use `json` here as that would override - # a BaseModel method in an incompatible fashion. - json_data: Union[Body, None] = None - extra_json: Union[AnyMapping, None] = None - - if PYDANTIC_V2: - model_config: ClassVar[ConfigDict] = ConfigDict(arbitrary_types_allowed=True) - else: - - class Config(pydantic.BaseConfig): # pyright: ignore[reportDeprecated] - arbitrary_types_allowed: bool = True - - def get_max_retries(self, max_retries: int) -> int: - if isinstance(self.max_retries, NotGiven): - return max_retries - return self.max_retries - - def _strip_raw_response_header(self) -> None: - if not is_given(self.headers): - return - - if self.headers.get(RAW_RESPONSE_HEADER): - self.headers = {**self.headers} - self.headers.pop(RAW_RESPONSE_HEADER) - - # override the `construct` method so that we can run custom transformations. 
- # this is necessary as we don't want to do any actual runtime type checking - # (which means we can't use validators) but we do want to ensure that `NotGiven` - # values are not present - # - # type ignore required because we're adding explicit types to `**values` - @classmethod - def construct( # type: ignore - cls, - _fields_set: set[str] | None = None, - **values: Unpack[FinalRequestOptionsInput], - ) -> FinalRequestOptions: - kwargs: dict[str, Any] = { - # we unconditionally call `strip_not_given` on any value - # as it will just ignore any non-mapping types - key: strip_not_given(value) - for key, value in values.items() - } - if PYDANTIC_V2: - return super().model_construct(_fields_set, **kwargs) - return cast(FinalRequestOptions, super().construct(_fields_set, **kwargs)) # pyright: ignore[reportDeprecated] - - if not TYPE_CHECKING: - # type checkers incorrectly complain about this assignment - model_construct = construct diff --git a/myenv/Lib/site-packages/groq/_qs.py b/myenv/Lib/site-packages/groq/_qs.py deleted file mode 100644 index 274320c..0000000 --- a/myenv/Lib/site-packages/groq/_qs.py +++ /dev/null @@ -1,150 +0,0 @@ -from __future__ import annotations - -from typing import Any, List, Tuple, Union, Mapping, TypeVar -from urllib.parse import parse_qs, urlencode -from typing_extensions import Literal, get_args - -from ._types import NOT_GIVEN, NotGiven, NotGivenOr -from ._utils import flatten - -_T = TypeVar("_T") - - -ArrayFormat = Literal["comma", "repeat", "indices", "brackets"] -NestedFormat = Literal["dots", "brackets"] - -PrimitiveData = Union[str, int, float, bool, None] -# this should be Data = Union[PrimitiveData, "List[Data]", "Tuple[Data]", "Mapping[str, Data]"] -# https://github.com/microsoft/pyright/issues/3555 -Data = Union[PrimitiveData, List[Any], Tuple[Any], "Mapping[str, Any]"] -Params = Mapping[str, Data] - - -class Querystring: - array_format: ArrayFormat - nested_format: NestedFormat - - def __init__( - self, - *, - array_format: ArrayFormat = "repeat", - nested_format: NestedFormat = "brackets", - ) -> None: - self.array_format = array_format - self.nested_format = nested_format - - def parse(self, query: str) -> Mapping[str, object]: - # Note: custom format syntax is not supported yet - return parse_qs(query) - - def stringify( - self, - params: Params, - *, - array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, - nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, - ) -> str: - return urlencode( - self.stringify_items( - params, - array_format=array_format, - nested_format=nested_format, - ) - ) - - def stringify_items( - self, - params: Params, - *, - array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, - nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, - ) -> list[tuple[str, str]]: - opts = Options( - qs=self, - array_format=array_format, - nested_format=nested_format, - ) - return flatten([self._stringify_item(key, value, opts) for key, value in params.items()]) - - def _stringify_item( - self, - key: str, - value: Data, - opts: Options, - ) -> list[tuple[str, str]]: - if isinstance(value, Mapping): - items: list[tuple[str, str]] = [] - nested_format = opts.nested_format - for subkey, subvalue in value.items(): - items.extend( - self._stringify_item( - # TODO: error if unknown format - f"{key}.{subkey}" if nested_format == "dots" else f"{key}[{subkey}]", - subvalue, - opts, - ) - ) - return items - - if isinstance(value, (list, tuple)): - array_format = opts.array_format - if array_format == "comma": - return [ - ( - key, - 
",".join(self._primitive_value_to_str(item) for item in value if item is not None), - ), - ] - elif array_format == "repeat": - items = [] - for item in value: - items.extend(self._stringify_item(key, item, opts)) - return items - elif array_format == "indices": - raise NotImplementedError("The array indices format is not supported yet") - elif array_format == "brackets": - items = [] - key = key + "[]" - for item in value: - items.extend(self._stringify_item(key, item, opts)) - return items - else: - raise NotImplementedError( - f"Unknown array_format value: {array_format}, choose from {', '.join(get_args(ArrayFormat))}" - ) - - serialised = self._primitive_value_to_str(value) - if not serialised: - return [] - return [(key, serialised)] - - def _primitive_value_to_str(self, value: PrimitiveData) -> str: - # copied from httpx - if value is True: - return "true" - elif value is False: - return "false" - elif value is None: - return "" - return str(value) - - -_qs = Querystring() -parse = _qs.parse -stringify = _qs.stringify -stringify_items = _qs.stringify_items - - -class Options: - array_format: ArrayFormat - nested_format: NestedFormat - - def __init__( - self, - qs: Querystring = _qs, - *, - array_format: NotGivenOr[ArrayFormat] = NOT_GIVEN, - nested_format: NotGivenOr[NestedFormat] = NOT_GIVEN, - ) -> None: - self.array_format = qs.array_format if isinstance(array_format, NotGiven) else array_format - self.nested_format = qs.nested_format if isinstance(nested_format, NotGiven) else nested_format diff --git a/myenv/Lib/site-packages/groq/_resource.py b/myenv/Lib/site-packages/groq/_resource.py deleted file mode 100644 index fc39158..0000000 --- a/myenv/Lib/site-packages/groq/_resource.py +++ /dev/null @@ -1,43 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -import time -from typing import TYPE_CHECKING - -import anyio - -if TYPE_CHECKING: - from ._client import Groq, AsyncGroq - - -class SyncAPIResource: - _client: Groq - - def __init__(self, client: Groq) -> None: - self._client = client - self._get = client.get - self._post = client.post - self._patch = client.patch - self._put = client.put - self._delete = client.delete - self._get_api_list = client.get_api_list - - def _sleep(self, seconds: float) -> None: - time.sleep(seconds) - - -class AsyncAPIResource: - _client: AsyncGroq - - def __init__(self, client: AsyncGroq) -> None: - self._client = client - self._get = client.get - self._post = client.post - self._patch = client.patch - self._put = client.put - self._delete = client.delete - self._get_api_list = client.get_api_list - - async def _sleep(self, seconds: float) -> None: - await anyio.sleep(seconds) diff --git a/myenv/Lib/site-packages/groq/_response.py b/myenv/Lib/site-packages/groq/_response.py deleted file mode 100644 index d7ae9cd..0000000 --- a/myenv/Lib/site-packages/groq/_response.py +++ /dev/null @@ -1,821 +0,0 @@ -from __future__ import annotations - -import os -import inspect -import logging -import datetime -import functools -from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Union, - Generic, - TypeVar, - Callable, - Iterator, - AsyncIterator, - cast, - overload, -) -from typing_extensions import Awaitable, ParamSpec, override, get_origin - -import anyio -import httpx -import pydantic - -from ._types import NoneType -from ._utils import is_given, extract_type_arg, is_annotated_type, extract_type_var_from_base -from ._models import BaseModel, is_basemodel -from ._constants import RAW_RESPONSE_HEADER, OVERRIDE_CAST_TO_HEADER -from ._streaming import Stream, AsyncStream, is_stream_class_type, extract_stream_chunk_type -from ._exceptions import GroqError, APIResponseValidationError - -if TYPE_CHECKING: - from ._models import FinalRequestOptions - from ._base_client import BaseClient - - -P = ParamSpec("P") -R = TypeVar("R") -_T = TypeVar("_T") -_APIResponseT = TypeVar("_APIResponseT", bound="APIResponse[Any]") -_AsyncAPIResponseT = TypeVar("_AsyncAPIResponseT", bound="AsyncAPIResponse[Any]") - -log: logging.Logger = logging.getLogger(__name__) - - -class BaseAPIResponse(Generic[R]): - _cast_to: type[R] - _client: BaseClient[Any, Any] - _parsed_by_type: dict[type[Any], Any] - _is_sse_stream: bool - _stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None - _options: FinalRequestOptions - - http_response: httpx.Response - - retries_taken: int - """The number of retries made. 
If no retries happened this will be `0`""" - - def __init__( - self, - *, - raw: httpx.Response, - cast_to: type[R], - client: BaseClient[Any, Any], - stream: bool, - stream_cls: type[Stream[Any]] | type[AsyncStream[Any]] | None, - options: FinalRequestOptions, - retries_taken: int = 0, - ) -> None: - self._cast_to = cast_to - self._client = client - self._parsed_by_type = {} - self._is_sse_stream = stream - self._stream_cls = stream_cls - self._options = options - self.http_response = raw - self.retries_taken = retries_taken - - @property - def headers(self) -> httpx.Headers: - return self.http_response.headers - - @property - def http_request(self) -> httpx.Request: - """Returns the httpx Request instance associated with the current response.""" - return self.http_response.request - - @property - def status_code(self) -> int: - return self.http_response.status_code - - @property - def url(self) -> httpx.URL: - """Returns the URL for which the request was made.""" - return self.http_response.url - - @property - def method(self) -> str: - return self.http_request.method - - @property - def http_version(self) -> str: - return self.http_response.http_version - - @property - def elapsed(self) -> datetime.timedelta: - """The time taken for the complete request/response cycle to complete.""" - return self.http_response.elapsed - - @property - def is_closed(self) -> bool: - """Whether or not the response body has been closed. - - If this is False then there is response data that has not been read yet. - You must either fully consume the response body or call `.close()` - before discarding the response to prevent resource leaks. - """ - return self.http_response.is_closed - - @override - def __repr__(self) -> str: - return ( - f"<{self.__class__.__name__} [{self.status_code} {self.http_response.reason_phrase}] type={self._cast_to}>" - ) - - def _parse(self, *, to: type[_T] | None = None) -> R | _T: - # unwrap `Annotated[T, ...]` -> `T` - if to and is_annotated_type(to): - to = extract_type_arg(to, 0) - - if self._is_sse_stream: - if to: - if not is_stream_class_type(to): - raise TypeError(f"Expected custom parse type to be a subclass of {Stream} or {AsyncStream}") - - return cast( - _T, - to( - cast_to=extract_stream_chunk_type( - to, - failure_message="Expected custom stream type to be passed with a type argument, e.g. 
Stream[ChunkType]", - ), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - if self._stream_cls: - return cast( - R, - self._stream_cls( - cast_to=extract_stream_chunk_type(self._stream_cls), - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - stream_cls = cast("type[Stream[Any]] | type[AsyncStream[Any]] | None", self._client._default_stream_cls) - if stream_cls is None: - raise MissingStreamClassError() - - return cast( - R, - stream_cls( - cast_to=self._cast_to, - response=self.http_response, - client=cast(Any, self._client), - ), - ) - - cast_to = to if to is not None else self._cast_to - - # unwrap `Annotated[T, ...]` -> `T` - if is_annotated_type(cast_to): - cast_to = extract_type_arg(cast_to, 0) - - if cast_to is NoneType: - return cast(R, None) - - response = self.http_response - if cast_to == str: - return cast(R, response.text) - - if cast_to == bytes: - return cast(R, response.content) - - if cast_to == int: - return cast(R, int(response.text)) - - if cast_to == float: - return cast(R, float(response.text)) - - origin = get_origin(cast_to) or cast_to - - if origin == APIResponse: - raise RuntimeError("Unexpected state - cast_to is `APIResponse`") - - if inspect.isclass(origin) and issubclass(origin, httpx.Response): - # Because of the invariance of our ResponseT TypeVar, users can subclass httpx.Response - # and pass that class to our request functions. We cannot change the variance to be either - # covariant or contravariant as that makes our usage of ResponseT illegal. We could construct - # the response class ourselves but that is something that should be supported directly in httpx - # as it would be easy to incorrectly construct the Response object due to the multitude of arguments. - if cast_to != httpx.Response: - raise ValueError(f"Subclasses of httpx.Response cannot be passed to `cast_to`") - return cast(R, response) - - if inspect.isclass(origin) and not issubclass(origin, BaseModel) and issubclass(origin, pydantic.BaseModel): - raise TypeError("Pydantic models must subclass our base model type, e.g. `from groq import BaseModel`") - - if ( - cast_to is not object - and not origin is list - and not origin is dict - and not origin is Union - and not issubclass(origin, BaseModel) - ): - raise RuntimeError( - f"Unsupported type, expected {cast_to} to be a subclass of {BaseModel}, {dict}, {list}, {Union}, {NoneType}, {str} or {httpx.Response}." - ) - - # split is required to handle cases where additional information is included - # in the response, e.g. application/json; charset=utf-8 - content_type, *_ = response.headers.get("content-type", "*").split(";") - if content_type != "application/json": - if is_basemodel(cast_to): - try: - data = response.json() - except Exception as exc: - log.debug("Could not read JSON from response data due to %s - %s", type(exc), exc) - else: - return self._client._process_response_data( - data=data, - cast_to=cast_to, # type: ignore - response=response, - ) - - if self._client._strict_response_validation: - raise APIResponseValidationError( - response=response, - message=f"Expected Content-Type response header to be `application/json` but received `{content_type}` instead.", - body=response.text, - ) - - # If the API responds with content that isn't JSON then we just return - # the (decoded) text without performing any parsing so that you can still - # handle the response however you need to. 
- return response.text # type: ignore - - data = response.json() - - return self._client._process_response_data( - data=data, - cast_to=cast_to, # type: ignore - response=response, - ) - - -class APIResponse(BaseAPIResponse[R]): - @overload - def parse(self, *, to: type[_T]) -> _T: ... - - @overload - def parse(self) -> R: ... - - def parse(self, *, to: type[_T] | None = None) -> R | _T: - """Returns the rich python representation of this response's data. - - For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. - - You can customise the type that the response is parsed into through - the `to` argument, e.g. - - ```py - from groq import BaseModel - - - class MyModel(BaseModel): - foo: str - - - obj = response.parse(to=MyModel) - print(obj.foo) - ``` - - We support parsing: - - `BaseModel` - - `dict` - - `list` - - `Union` - - `str` - - `int` - - `float` - - `httpx.Response` - """ - cache_key = to if to is not None else self._cast_to - cached = self._parsed_by_type.get(cache_key) - if cached is not None: - return cached # type: ignore[no-any-return] - - if not self._is_sse_stream: - self.read() - - parsed = self._parse(to=to) - if is_given(self._options.post_parser): - parsed = self._options.post_parser(parsed) - - self._parsed_by_type[cache_key] = parsed - return parsed - - def read(self) -> bytes: - """Read and return the binary response content.""" - try: - return self.http_response.read() - except httpx.StreamConsumed as exc: - # The default error raised by httpx isn't very - # helpful in our case so we re-raise it with - # a different error message. - raise StreamAlreadyConsumed() from exc - - def text(self) -> str: - """Read and decode the response content into a string.""" - self.read() - return self.http_response.text - - def json(self) -> object: - """Read and decode the JSON response content.""" - self.read() - return self.http_response.json() - - def close(self) -> None: - """Close the response and release the connection. - - Automatically called if the response body is read to completion. - """ - self.http_response.close() - - def iter_bytes(self, chunk_size: int | None = None) -> Iterator[bytes]: - """ - A byte-iterator over the decoded response content. - - This automatically handles gzip, deflate and brotli encoded responses. - """ - for chunk in self.http_response.iter_bytes(chunk_size): - yield chunk - - def iter_text(self, chunk_size: int | None = None) -> Iterator[str]: - """A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. - """ - for chunk in self.http_response.iter_text(chunk_size): - yield chunk - - def iter_lines(self) -> Iterator[str]: - """Like `iter_text()` but will only yield chunks for each line""" - for chunk in self.http_response.iter_lines(): - yield chunk - - -class AsyncAPIResponse(BaseAPIResponse[R]): - @overload - async def parse(self, *, to: type[_T]) -> _T: ... - - @overload - async def parse(self) -> R: ... - - async def parse(self, *, to: type[_T] | None = None) -> R | _T: - """Returns the rich python representation of this response's data. - - For lower-level control, see `.read()`, `.json()`, `.iter_bytes()`. - - You can customise the type that the response is parsed into through - the `to` argument, e.g. 
- - ```py - from groq import BaseModel - - - class MyModel(BaseModel): - foo: str - - - obj = response.parse(to=MyModel) - print(obj.foo) - ``` - - We support parsing: - - `BaseModel` - - `dict` - - `list` - - `Union` - - `str` - - `httpx.Response` - """ - cache_key = to if to is not None else self._cast_to - cached = self._parsed_by_type.get(cache_key) - if cached is not None: - return cached # type: ignore[no-any-return] - - if not self._is_sse_stream: - await self.read() - - parsed = self._parse(to=to) - if is_given(self._options.post_parser): - parsed = self._options.post_parser(parsed) - - self._parsed_by_type[cache_key] = parsed - return parsed - - async def read(self) -> bytes: - """Read and return the binary response content.""" - try: - return await self.http_response.aread() - except httpx.StreamConsumed as exc: - # the default error raised by httpx isn't very - # helpful in our case so we re-raise it with - # a different error message - raise StreamAlreadyConsumed() from exc - - async def text(self) -> str: - """Read and decode the response content into a string.""" - await self.read() - return self.http_response.text - - async def json(self) -> object: - """Read and decode the JSON response content.""" - await self.read() - return self.http_response.json() - - async def close(self) -> None: - """Close the response and release the connection. - - Automatically called if the response body is read to completion. - """ - await self.http_response.aclose() - - async def iter_bytes(self, chunk_size: int | None = None) -> AsyncIterator[bytes]: - """ - A byte-iterator over the decoded response content. - - This automatically handles gzip, deflate and brotli encoded responses. - """ - async for chunk in self.http_response.aiter_bytes(chunk_size): - yield chunk - - async def iter_text(self, chunk_size: int | None = None) -> AsyncIterator[str]: - """A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. - """ - async for chunk in self.http_response.aiter_text(chunk_size): - yield chunk - - async def iter_lines(self) -> AsyncIterator[str]: - """Like `iter_text()` but will only yield chunks for each line""" - async for chunk in self.http_response.aiter_lines(): - yield chunk - - -class BinaryAPIResponse(APIResponse[bytes]): - """Subclass of APIResponse providing helpers for dealing with binary data. - - Note: If you want to stream the response data instead of eagerly reading it - all at once then you should use `.with_streaming_response` when making - the API request, e.g. `.with_streaming_response.get_binary_response()` - """ - - def write_to_file( - self, - file: str | os.PathLike[str], - ) -> None: - """Write the output to the given file. - - Accepts a filename or any path-like object, e.g. pathlib.Path - - Note: if you want to stream the data to the file instead of writing - all at once then you should use `.with_streaming_response` when making - the API request, e.g. `.with_streaming_response.get_binary_response()` - """ - with open(file, mode="wb") as f: - for data in self.iter_bytes(): - f.write(data) - - -class AsyncBinaryAPIResponse(AsyncAPIResponse[bytes]): - """Subclass of APIResponse providing helpers for dealing with binary data. - - Note: If you want to stream the response data instead of eagerly reading it - all at once then you should use `.with_streaming_response` when making - the API request, e.g. 
`.with_streaming_response.get_binary_response()` - """ - - async def write_to_file( - self, - file: str | os.PathLike[str], - ) -> None: - """Write the output to the given file. - - Accepts a filename or any path-like object, e.g. pathlib.Path - - Note: if you want to stream the data to the file instead of writing - all at once then you should use `.with_streaming_response` when making - the API request, e.g. `.with_streaming_response.get_binary_response()` - """ - path = anyio.Path(file) - async with await path.open(mode="wb") as f: - async for data in self.iter_bytes(): - await f.write(data) - - -class StreamedBinaryAPIResponse(APIResponse[bytes]): - def stream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - """Streams the output to the given file. - - Accepts a filename or any path-like object, e.g. pathlib.Path - """ - with open(file, mode="wb") as f: - for data in self.iter_bytes(chunk_size): - f.write(data) - - -class AsyncStreamedBinaryAPIResponse(AsyncAPIResponse[bytes]): - async def stream_to_file( - self, - file: str | os.PathLike[str], - *, - chunk_size: int | None = None, - ) -> None: - """Streams the output to the given file. - - Accepts a filename or any path-like object, e.g. pathlib.Path - """ - path = anyio.Path(file) - async with await path.open(mode="wb") as f: - async for data in self.iter_bytes(chunk_size): - await f.write(data) - - -class MissingStreamClassError(TypeError): - def __init__(self) -> None: - super().__init__( - "The `stream` argument was set to `True` but the `stream_cls` argument was not given. See `groq._streaming` for reference", - ) - - -class StreamAlreadyConsumed(GroqError): - """ - Attempted to read or stream content, but the content has already - been streamed. - - This can happen if you use a method like `.iter_lines()` and then attempt - to read th entire response body afterwards, e.g. - - ```py - response = await client.post(...) - async for line in response.iter_lines(): - ... # do something with `line` - - content = await response.read() - # ^ error - ``` - - If you want this behaviour you'll need to either manually accumulate the response - content or call `await response.read()` before iterating over the stream. - """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream some content, but the content has " - "already been streamed. " - "This could be due to attempting to stream the response " - "content more than once." - "\n\n" - "You can fix this by manually accumulating the response content while streaming " - "or by calling `.read()` before starting to stream." 
- ) - super().__init__(message) - - -class ResponseContextManager(Generic[_APIResponseT]): - """Context manager for ensuring that a request is not made - until it is entered and that the response will always be closed - when the context manager exits - """ - - def __init__(self, request_func: Callable[[], _APIResponseT]) -> None: - self._request_func = request_func - self.__response: _APIResponseT | None = None - - def __enter__(self) -> _APIResponseT: - self.__response = self._request_func() - return self.__response - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - if self.__response is not None: - self.__response.close() - - -class AsyncResponseContextManager(Generic[_AsyncAPIResponseT]): - """Context manager for ensuring that a request is not made - until it is entered and that the response will always be closed - when the context manager exits - """ - - def __init__(self, api_request: Awaitable[_AsyncAPIResponseT]) -> None: - self._api_request = api_request - self.__response: _AsyncAPIResponseT | None = None - - async def __aenter__(self) -> _AsyncAPIResponseT: - self.__response = await self._api_request - return self.__response - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - if self.__response is not None: - await self.__response.close() - - -def to_streamed_response_wrapper(func: Callable[P, R]) -> Callable[P, ResponseContextManager[APIResponse[R]]]: - """Higher order function that takes one of our bound API methods and wraps it - to support streaming and returning the raw `APIResponse` object directly. - """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[APIResponse[R]]: - extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "stream" - - kwargs["extra_headers"] = extra_headers - - make_request = functools.partial(func, *args, **kwargs) - - return ResponseContextManager(cast(Callable[[], APIResponse[R]], make_request)) - - return wrapped - - -def async_to_streamed_response_wrapper( - func: Callable[P, Awaitable[R]], -) -> Callable[P, AsyncResponseContextManager[AsyncAPIResponse[R]]]: - """Higher order function that takes one of our bound API methods and wraps it - to support streaming and returning the raw `APIResponse` object directly. - """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[AsyncAPIResponse[R]]: - extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "stream" - - kwargs["extra_headers"] = extra_headers - - make_request = func(*args, **kwargs) - - return AsyncResponseContextManager(cast(Awaitable[AsyncAPIResponse[R]], make_request)) - - return wrapped - - -def to_custom_streamed_response_wrapper( - func: Callable[P, object], - response_cls: type[_APIResponseT], -) -> Callable[P, ResponseContextManager[_APIResponseT]]: - """Higher order function that takes one of our bound API methods and an `APIResponse` class - and wraps the method to support streaming and returning the given response class directly. - - Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` - """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> ResponseContextManager[_APIResponseT]: - extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "stream" - extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls - - kwargs["extra_headers"] = extra_headers - - make_request = functools.partial(func, *args, **kwargs) - - return ResponseContextManager(cast(Callable[[], _APIResponseT], make_request)) - - return wrapped - - -def async_to_custom_streamed_response_wrapper( - func: Callable[P, Awaitable[object]], - response_cls: type[_AsyncAPIResponseT], -) -> Callable[P, AsyncResponseContextManager[_AsyncAPIResponseT]]: - """Higher order function that takes one of our bound API methods and an `APIResponse` class - and wraps the method to support streaming and returning the given response class directly. - - Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` - """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncResponseContextManager[_AsyncAPIResponseT]: - extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "stream" - extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls - - kwargs["extra_headers"] = extra_headers - - make_request = func(*args, **kwargs) - - return AsyncResponseContextManager(cast(Awaitable[_AsyncAPIResponseT], make_request)) - - return wrapped - - -def to_raw_response_wrapper(func: Callable[P, R]) -> Callable[P, APIResponse[R]]: - """Higher order function that takes one of our bound API methods and wraps it - to support returning the raw `APIResponse` object directly. - """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> APIResponse[R]: - extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "raw" - - kwargs["extra_headers"] = extra_headers - - return cast(APIResponse[R], func(*args, **kwargs)) - - return wrapped - - -def async_to_raw_response_wrapper(func: Callable[P, Awaitable[R]]) -> Callable[P, Awaitable[AsyncAPIResponse[R]]]: - """Higher order function that takes one of our bound API methods and wraps it - to support returning the raw `APIResponse` object directly. - """ - - @functools.wraps(func) - async def wrapped(*args: P.args, **kwargs: P.kwargs) -> AsyncAPIResponse[R]: - extra_headers: dict[str, str] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "raw" - - kwargs["extra_headers"] = extra_headers - - return cast(AsyncAPIResponse[R], await func(*args, **kwargs)) - - return wrapped - - -def to_custom_raw_response_wrapper( - func: Callable[P, object], - response_cls: type[_APIResponseT], -) -> Callable[P, _APIResponseT]: - """Higher order function that takes one of our bound API methods and an `APIResponse` class - and wraps the method to support returning the given response class directly. - - Note: the given `response_cls` *must* be concrete, e.g. 
`class BinaryAPIResponse(APIResponse[bytes])` - """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> _APIResponseT: - extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "raw" - extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls - - kwargs["extra_headers"] = extra_headers - - return cast(_APIResponseT, func(*args, **kwargs)) - - return wrapped - - -def async_to_custom_raw_response_wrapper( - func: Callable[P, Awaitable[object]], - response_cls: type[_AsyncAPIResponseT], -) -> Callable[P, Awaitable[_AsyncAPIResponseT]]: - """Higher order function that takes one of our bound API methods and an `APIResponse` class - and wraps the method to support returning the given response class directly. - - Note: the given `response_cls` *must* be concrete, e.g. `class BinaryAPIResponse(APIResponse[bytes])` - """ - - @functools.wraps(func) - def wrapped(*args: P.args, **kwargs: P.kwargs) -> Awaitable[_AsyncAPIResponseT]: - extra_headers: dict[str, Any] = {**(cast(Any, kwargs.get("extra_headers")) or {})} - extra_headers[RAW_RESPONSE_HEADER] = "raw" - extra_headers[OVERRIDE_CAST_TO_HEADER] = response_cls - - kwargs["extra_headers"] = extra_headers - - return cast(Awaitable[_AsyncAPIResponseT], func(*args, **kwargs)) - - return wrapped - - -def extract_response_type(typ: type[BaseAPIResponse[Any]]) -> type: - """Given a type like `APIResponse[T]`, returns the generic type variable `T`. - - This also handles the case where a concrete subclass is given, e.g. - ```py - class MyResponse(APIResponse[bytes]): - ... - - extract_response_type(MyResponse) -> bytes - ``` - """ - return extract_type_var_from_base( - typ, - generic_bases=cast("tuple[type, ...]", (BaseAPIResponse, APIResponse, AsyncAPIResponse)), - index=0, - ) diff --git a/myenv/Lib/site-packages/groq/_streaming.py b/myenv/Lib/site-packages/groq/_streaming.py deleted file mode 100644 index 01defb2..0000000 --- a/myenv/Lib/site-packages/groq/_streaming.py +++ /dev/null @@ -1,410 +0,0 @@ -# Note: initially copied from https://github.com/florimondmanca/httpx-sse/blob/master/src/httpx_sse/_decoders.py -from __future__ import annotations - -import json -import inspect -from types import TracebackType -from typing import TYPE_CHECKING, Any, Generic, TypeVar, Iterator, AsyncIterator, cast -from typing_extensions import Self, Protocol, TypeGuard, override, get_origin, runtime_checkable - -import httpx - -from ._utils import is_mapping, extract_type_var_from_base -from ._exceptions import APIError - -if TYPE_CHECKING: - from ._client import Groq, AsyncGroq - - -_T = TypeVar("_T") - - -class Stream(Generic[_T]): - """Provides the core interface to iterate over a synchronous stream response.""" - - response: httpx.Response - - _decoder: SSEBytesDecoder - - def __init__( - self, - *, - cast_to: type[_T], - response: httpx.Response, - client: Groq, - ) -> None: - self.response = response - self._cast_to = cast_to - self._client = client - self._decoder = client._make_sse_decoder() - self._iterator = self.__stream__() - - def __next__(self) -> _T: - return self._iterator.__next__() - - def __iter__(self) -> Iterator[_T]: - for item in self._iterator: - yield item - - def _iter_events(self) -> Iterator[ServerSentEvent]: - yield from self._decoder.iter_bytes(self.response.iter_bytes()) - - def __stream__(self) -> Iterator[_T]: - cast_to = cast(Any, self._cast_to) - response = self.response - process_data = self._client._process_response_data - iterator = 
self._iter_events() - - for sse in iterator: - if sse.data.startswith("[DONE]"): - break - - if sse.event is None: - data = sse.json() - if is_mapping(data) and data.get("error"): - message = None - error = data.get("error") - if is_mapping(error): - message = error.get("message") - if not message or not isinstance(message, str): - message = "An error occurred during streaming" - - raise APIError( - message=message, - request=self.response.request, - body=data["error"], - ) - - yield process_data(data=data, cast_to=cast_to, response=response) - - else: - data = sse.json() - - if sse.event == "error" and is_mapping(data) and data.get("error"): - message = None - error = data.get("error") - if is_mapping(error): - message = error.get("message") - if not message or not isinstance(message, str): - message = "An error occurred during streaming" - - raise APIError( - message=message, - request=self.response.request, - body=data["error"], - ) - - yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response) - - # Ensure the entire stream is consumed - for _sse in iterator: - ... - - def __enter__(self) -> Self: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None, - exc: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - self.close() - - def close(self) -> None: - """ - Close the response and release the connection. - - Automatically called if the response body is read to completion. - """ - self.response.close() - - -class AsyncStream(Generic[_T]): - """Provides the core interface to iterate over an asynchronous stream response.""" - - response: httpx.Response - - _decoder: SSEDecoder | SSEBytesDecoder - - def __init__( - self, - *, - cast_to: type[_T], - response: httpx.Response, - client: AsyncGroq, - ) -> None: - self.response = response - self._cast_to = cast_to - self._client = client - self._decoder = client._make_sse_decoder() - self._iterator = self.__stream__() - - async def __anext__(self) -> _T: - return await self._iterator.__anext__() - - async def __aiter__(self) -> AsyncIterator[_T]: - async for item in self._iterator: - yield item - - async def _iter_events(self) -> AsyncIterator[ServerSentEvent]: - async for sse in self._decoder.aiter_bytes(self.response.aiter_bytes()): - yield sse - - async def __stream__(self) -> AsyncIterator[_T]: - cast_to = cast(Any, self._cast_to) - response = self.response - process_data = self._client._process_response_data - iterator = self._iter_events() - - async for sse in iterator: - if sse.data.startswith("[DONE]"): - break - - if sse.event is None: - data = sse.json() - if is_mapping(data) and data.get("error"): - message = None - error = data.get("error") - if is_mapping(error): - message = error.get("message") - if not message or not isinstance(message, str): - message = "An error occurred during streaming" - - raise APIError( - message=message, - request=self.response.request, - body=data["error"], - ) - - yield process_data(data=data, cast_to=cast_to, response=response) - - else: - data = sse.json() - - if sse.event == "error" and is_mapping(data) and data.get("error"): - message = None - error = data.get("error") - if is_mapping(error): - message = error.get("message") - if not message or not isinstance(message, str): - message = "An error occurred during streaming" - - raise APIError( - message=message, - request=self.response.request, - body=data["error"], - ) - - yield process_data(data={"data": data, "event": sse.event}, cast_to=cast_to, response=response) - - # 
Ensure the entire stream is consumed - async for _sse in iterator: - ... - - async def __aenter__(self) -> Self: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - await self.close() - - async def close(self) -> None: - """ - Close the response and release the connection. - - Automatically called if the response body is read to completion. - """ - await self.response.aclose() - - -class ServerSentEvent: - def __init__( - self, - *, - event: str | None = None, - data: str | None = None, - id: str | None = None, - retry: int | None = None, - ) -> None: - if data is None: - data = "" - - self._id = id - self._data = data - self._event = event or None - self._retry = retry - - @property - def event(self) -> str | None: - return self._event - - @property - def id(self) -> str | None: - return self._id - - @property - def retry(self) -> int | None: - return self._retry - - @property - def data(self) -> str: - return self._data - - def json(self) -> Any: - return json.loads(self.data) - - @override - def __repr__(self) -> str: - return f"ServerSentEvent(event={self.event}, data={self.data}, id={self.id}, retry={self.retry})" - - -class SSEDecoder: - _data: list[str] - _event: str | None - _retry: int | None - _last_event_id: str | None - - def __init__(self) -> None: - self._event = None - self._data = [] - self._last_event_id = None - self._retry = None - - def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: - """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" - for chunk in self._iter_chunks(iterator): - # Split before decoding so splitlines() only uses \r and \n - for raw_line in chunk.splitlines(): - line = raw_line.decode("utf-8") - sse = self.decode(line) - if sse: - yield sse - - def _iter_chunks(self, iterator: Iterator[bytes]) -> Iterator[bytes]: - """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" - data = b"" - for chunk in iterator: - for line in chunk.splitlines(keepends=True): - data += line - if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): - yield data - data = b"" - if data: - yield data - - async def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: - """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" - async for chunk in self._aiter_chunks(iterator): - # Split before decoding so splitlines() only uses \r and \n - for raw_line in chunk.splitlines(): - line = raw_line.decode("utf-8") - sse = self.decode(line) - if sse: - yield sse - - async def _aiter_chunks(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[bytes]: - """Given an iterator that yields raw binary data, iterate over it and yield individual SSE chunks""" - data = b"" - async for chunk in iterator: - for line in chunk.splitlines(keepends=True): - data += line - if data.endswith((b"\r\r", b"\n\n", b"\r\n\r\n")): - yield data - data = b"" - if data: - yield data - - def decode(self, line: str) -> ServerSentEvent | None: - # See: https://html.spec.whatwg.org/multipage/server-sent-events.html#event-stream-interpretation # noqa: E501 - - if not line: - if not self._event and not self._data and not self._last_event_id and self._retry is None: - return None - - sse = ServerSentEvent( - event=self._event, - data="\n".join(self._data), - id=self._last_event_id, - retry=self._retry, - ) - - # NOTE: as per 
the SSE spec, do not reset last_event_id. - self._event = None - self._data = [] - self._retry = None - - return sse - - if line.startswith(":"): - return None - - fieldname, _, value = line.partition(":") - - if value.startswith(" "): - value = value[1:] - - if fieldname == "event": - self._event = value - elif fieldname == "data": - self._data.append(value) - elif fieldname == "id": - if "\0" in value: - pass - else: - self._last_event_id = value - elif fieldname == "retry": - try: - self._retry = int(value) - except (TypeError, ValueError): - pass - else: - pass # Field is ignored. - - return None - - -@runtime_checkable -class SSEBytesDecoder(Protocol): - def iter_bytes(self, iterator: Iterator[bytes]) -> Iterator[ServerSentEvent]: - """Given an iterator that yields raw binary data, iterate over it & yield every event encountered""" - ... - - def aiter_bytes(self, iterator: AsyncIterator[bytes]) -> AsyncIterator[ServerSentEvent]: - """Given an async iterator that yields raw binary data, iterate over it & yield every event encountered""" - ... - - -def is_stream_class_type(typ: type) -> TypeGuard[type[Stream[object]] | type[AsyncStream[object]]]: - """TypeGuard for determining whether or not the given type is a subclass of `Stream` / `AsyncStream`""" - origin = get_origin(typ) or typ - return inspect.isclass(origin) and issubclass(origin, (Stream, AsyncStream)) - - -def extract_stream_chunk_type( - stream_cls: type, - *, - failure_message: str | None = None, -) -> type: - """Given a type like `Stream[T]`, returns the generic type variable `T`. - - This also handles the case where a concrete subclass is given, e.g. - ```py - class MyStream(Stream[bytes]): - ... - - extract_stream_chunk_type(MyStream) -> bytes - ``` - """ - from ._base_client import Stream, AsyncStream - - return extract_type_var_from_base( - stream_cls, - index=0, - generic_bases=cast("tuple[type, ...]", (Stream, AsyncStream)), - failure_message=failure_message, - ) diff --git a/myenv/Lib/site-packages/groq/_types.py b/myenv/Lib/site-packages/groq/_types.py deleted file mode 100644 index f85d73b..0000000 --- a/myenv/Lib/site-packages/groq/_types.py +++ /dev/null @@ -1,217 +0,0 @@ -from __future__ import annotations - -from os import PathLike -from typing import ( - IO, - TYPE_CHECKING, - Any, - Dict, - List, - Type, - Tuple, - Union, - Mapping, - TypeVar, - Callable, - Optional, - Sequence, -) -from typing_extensions import Literal, Protocol, TypeAlias, TypedDict, override, runtime_checkable - -import httpx -import pydantic -from httpx import URL, Proxy, Timeout, Response, BaseTransport, AsyncBaseTransport - -if TYPE_CHECKING: - from ._models import BaseModel - from ._response import APIResponse, AsyncAPIResponse - -Transport = BaseTransport -AsyncTransport = AsyncBaseTransport -Query = Mapping[str, object] -Body = object -AnyMapping = Mapping[str, object] -ModelT = TypeVar("ModelT", bound=pydantic.BaseModel) -_T = TypeVar("_T") - - -# Approximates httpx internal ProxiesTypes and RequestFiles types -# while adding support for `PathLike` instances -ProxiesDict = Dict["str | URL", Union[None, str, URL, Proxy]] -ProxiesTypes = Union[str, Proxy, ProxiesDict] -if TYPE_CHECKING: - Base64FileInput = Union[IO[bytes], PathLike[str]] - FileContent = Union[IO[bytes], bytes, PathLike[str]] -else: - Base64FileInput = Union[IO[bytes], PathLike] - FileContent = Union[IO[bytes], bytes, PathLike] # PathLike is not subscriptable in Python 3.8. 
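For context on the `SSEDecoder` shown earlier in this hunk: `decode()` buffers `field: value` lines and only emits a `ServerSentEvent` once a blank line terminates the event, per the SSE spec. A minimal usage sketch, assuming the `groq` package (and its private `_streaming` module) is importable as shown in the deleted source:

```py
from groq._streaming import SSEDecoder

decoder = SSEDecoder()

# Field lines are buffered; no event is emitted yet.
assert decoder.decode("event: message") is None
assert decoder.decode('data: {"x": 1}') is None

# The blank line flushes the buffered fields into an event.
sse = decoder.decode("")
assert sse is not None and sse.event == "message"
print(sse.json())  # -> {'x': 1}
```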
-FileTypes = Union[ - # file (or bytes) - FileContent, - # (filename, file (or bytes)) - Tuple[Optional[str], FileContent], - # (filename, file (or bytes), content_type) - Tuple[Optional[str], FileContent, Optional[str]], - # (filename, file (or bytes), content_type, headers) - Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], -] -RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] - -# duplicate of the above but without our custom file support -HttpxFileContent = Union[IO[bytes], bytes] -HttpxFileTypes = Union[ - # file (or bytes) - HttpxFileContent, - # (filename, file (or bytes)) - Tuple[Optional[str], HttpxFileContent], - # (filename, file (or bytes), content_type) - Tuple[Optional[str], HttpxFileContent, Optional[str]], - # (filename, file (or bytes), content_type, headers) - Tuple[Optional[str], HttpxFileContent, Optional[str], Mapping[str, str]], -] -HttpxRequestFiles = Union[Mapping[str, HttpxFileTypes], Sequence[Tuple[str, HttpxFileTypes]]] - -# Workaround to support (cast_to: Type[ResponseT]) -> ResponseT -# where ResponseT includes `None`. In order to support directly -# passing `None`, overloads would have to be defined for every -# method that uses `ResponseT` which would lead to an unacceptable -# amount of code duplication and make it unreadable. See _base_client.py -# for example usage. -# -# This unfortunately means that you will either have -# to import this type and pass it explicitly: -# -# from groq import NoneType -# client.get('/foo', cast_to=NoneType) -# -# or build it yourself: -# -# client.get('/foo', cast_to=type(None)) -if TYPE_CHECKING: - NoneType: Type[None] -else: - NoneType = type(None) - - -class RequestOptions(TypedDict, total=False): - headers: Headers - max_retries: int - timeout: float | Timeout | None - params: Query - extra_json: AnyMapping - idempotency_key: str - - -# Sentinel class used until PEP 0661 is accepted -class NotGiven: - """ - A sentinel singleton class used to distinguish omitted keyword arguments - from those passed in with the value None (which may have different behavior). - - For example: - - ```py - def get(timeout: Union[int, NotGiven, None] = NotGiven()) -> Response: ... - - - get(timeout=1) # 1s timeout - get(timeout=None) # No timeout - get() # Default timeout behavior, which may not be statically known at the method definition. - ``` - """ - - def __bool__(self) -> Literal[False]: - return False - - @override - def __repr__(self) -> str: - return "NOT_GIVEN" - - -NotGivenOr = Union[_T, NotGiven] -NOT_GIVEN = NotGiven() - - -class Omit: - """In certain situations you need to be able to represent a case where a default value has - to be explicitly removed and `None` is not an appropriate substitute, for example: - - ```py - # as the default `Content-Type` header is `application/json` that will be sent - client.post("/upload/files", files={"file": b"my raw file content"}) - - # you can't explicitly override the header as it has to be dynamically generated - # to look something like: 'multipart/form-data; boundary=0d8382fcf5f8c3be01ca2e11002d2983' - client.post(..., headers={"Content-Type": "multipart/form-data"}) - - # instead you can remove the default `application/json` header by passing Omit - client.post(..., headers={"Content-Type": Omit()}) - ``` - """ - - def __bool__(self) -> Literal[False]: - return False - - -@runtime_checkable -class ModelBuilderProtocol(Protocol): - @classmethod - def build( - cls: type[_T], - *, - response: Response, - data: object, - ) -> _T: ... 
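The `FileTypes` union above accepts a bare file/bytes value or tuples that progressively add a filename, a content type, and extra headers; `RequestFiles` then maps form field names to those values. A hedged sketch of the accepted shapes (the filename and header values here are purely illustrative):

```py
from groq._types import FileTypes, RequestFiles

# Progressively richer shapes, all valid FileTypes values.
bare: FileTypes = b"raw bytes"
named: FileTypes = ("clip.mp3", b"raw bytes")
typed: FileTypes = ("clip.mp3", b"raw bytes", "audio/mpeg")
with_headers: FileTypes = ("clip.mp3", b"raw bytes", "audio/mpeg", {"X-Extra": "1"})

# RequestFiles maps form field names to any of the shapes above.
files: RequestFiles = {"file": typed}
```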
- - -Headers = Mapping[str, Union[str, Omit]] - - -class HeadersLikeProtocol(Protocol): - def get(self, __key: str) -> str | None: ... - - -HeadersLike = Union[Headers, HeadersLikeProtocol] - -ResponseT = TypeVar( - "ResponseT", - bound=Union[ - object, - str, - None, - "BaseModel", - List[Any], - Dict[str, Any], - Response, - ModelBuilderProtocol, - "APIResponse[Any]", - "AsyncAPIResponse[Any]", - ], -) - -StrBytesIntFloat = Union[str, bytes, int, float] - -# Note: copied from Pydantic -# https://github.com/pydantic/pydantic/blob/32ea570bf96e84234d2992e1ddf40ab8a565925a/pydantic/main.py#L49 -IncEx: TypeAlias = "set[int] | set[str] | dict[int, Any] | dict[str, Any] | None" - -PostParser = Callable[[Any], Any] - - -@runtime_checkable -class InheritsGeneric(Protocol): - """Represents a type that has inherited from `Generic` - - The `__orig_bases__` property can be used to determine the resolved - type variable for a given base class. - """ - - __orig_bases__: tuple[_GenericAlias] - - -class _GenericAlias(Protocol): - __origin__: type[object] - - -class HttpxSendArgs(TypedDict, total=False): - auth: httpx.Auth diff --git a/myenv/Lib/site-packages/groq/_utils/__init__.py b/myenv/Lib/site-packages/groq/_utils/__init__.py deleted file mode 100644 index 3efe66c..0000000 --- a/myenv/Lib/site-packages/groq/_utils/__init__.py +++ /dev/null @@ -1,55 +0,0 @@ -from ._sync import asyncify as asyncify -from ._proxy import LazyProxy as LazyProxy -from ._utils import ( - flatten as flatten, - is_dict as is_dict, - is_list as is_list, - is_given as is_given, - is_tuple as is_tuple, - lru_cache as lru_cache, - is_mapping as is_mapping, - is_tuple_t as is_tuple_t, - parse_date as parse_date, - is_iterable as is_iterable, - is_sequence as is_sequence, - coerce_float as coerce_float, - is_mapping_t as is_mapping_t, - removeprefix as removeprefix, - removesuffix as removesuffix, - extract_files as extract_files, - is_sequence_t as is_sequence_t, - required_args as required_args, - coerce_boolean as coerce_boolean, - coerce_integer as coerce_integer, - file_from_path as file_from_path, - parse_datetime as parse_datetime, - strip_not_given as strip_not_given, - deepcopy_minimal as deepcopy_minimal, - get_async_library as get_async_library, - maybe_coerce_float as maybe_coerce_float, - get_required_header as get_required_header, - maybe_coerce_boolean as maybe_coerce_boolean, - maybe_coerce_integer as maybe_coerce_integer, -) -from ._typing import ( - is_list_type as is_list_type, - is_union_type as is_union_type, - extract_type_arg as extract_type_arg, - is_iterable_type as is_iterable_type, - is_required_type as is_required_type, - is_annotated_type as is_annotated_type, - strip_annotated_type as strip_annotated_type, - extract_type_var_from_base as extract_type_var_from_base, -) -from ._streams import consume_sync_iterator as consume_sync_iterator, consume_async_iterator as consume_async_iterator -from ._transform import ( - PropertyInfo as PropertyInfo, - transform as transform, - async_transform as async_transform, - maybe_transform as maybe_transform, - async_maybe_transform as async_maybe_transform, -) -from ._reflection import ( - function_has_argument as function_has_argument, - assert_signatures_in_sync as assert_signatures_in_sync, -) diff --git a/myenv/Lib/site-packages/groq/_utils/_logs.py b/myenv/Lib/site-packages/groq/_utils/_logs.py deleted file mode 100644 index 091217c..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_logs.py +++ /dev/null @@ -1,25 +0,0 @@ -import os -import logging - -logger: 
logging.Logger = logging.getLogger("groq") -httpx_logger: logging.Logger = logging.getLogger("httpx") - - -def _basic_config() -> None: - # e.g. [2023-10-05 14:12:26 - groq._base_client:818 - DEBUG] HTTP Request: POST http://127.0.0.1:4010/foo/bar "200 OK" - logging.basicConfig( - format="[%(asctime)s - %(name)s:%(lineno)d - %(levelname)s] %(message)s", - datefmt="%Y-%m-%d %H:%M:%S", - ) - - -def setup_logging() -> None: - env = os.environ.get("GROQ_LOG") - if env == "debug": - _basic_config() - logger.setLevel(logging.DEBUG) - httpx_logger.setLevel(logging.DEBUG) - elif env == "info": - _basic_config() - logger.setLevel(logging.INFO) - httpx_logger.setLevel(logging.INFO) diff --git a/myenv/Lib/site-packages/groq/_utils/_proxy.py b/myenv/Lib/site-packages/groq/_utils/_proxy.py deleted file mode 100644 index ffd883e..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_proxy.py +++ /dev/null @@ -1,62 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -from typing import Generic, TypeVar, Iterable, cast -from typing_extensions import override - -T = TypeVar("T") - - -class LazyProxy(Generic[T], ABC): - """Implements data methods to pretend that an instance is another instance. - - This includes forwarding attribute access and other methods. - """ - - # Note: we have to special case proxies that themselves return proxies - # to support using a proxy as a catch-all for any random access, e.g. `proxy.foo.bar.baz` - - def __getattr__(self, attr: str) -> object: - proxied = self.__get_proxied__() - if isinstance(proxied, LazyProxy): - return proxied # pyright: ignore - return getattr(proxied, attr) - - @override - def __repr__(self) -> str: - proxied = self.__get_proxied__() - if isinstance(proxied, LazyProxy): - return proxied.__class__.__name__ - return repr(self.__get_proxied__()) - - @override - def __str__(self) -> str: - proxied = self.__get_proxied__() - if isinstance(proxied, LazyProxy): - return proxied.__class__.__name__ - return str(proxied) - - @override - def __dir__(self) -> Iterable[str]: - proxied = self.__get_proxied__() - if isinstance(proxied, LazyProxy): - return [] - return proxied.__dir__() - - @property # type: ignore - @override - def __class__(self) -> type: # pyright: ignore - proxied = self.__get_proxied__() - if issubclass(type(proxied), LazyProxy): - return type(proxied) - return proxied.__class__ - - def __get_proxied__(self) -> T: - return self.__load__() - - def __as_proxied__(self) -> T: - """Helper method that returns the current proxy, typed as the loaded object""" - return cast(T, self) - - @abstractmethod - def __load__(self) -> T: ... 
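`LazyProxy` above is abstract: a concrete subclass supplies `__load__`, and the proxy then forwards attribute access (plus `repr`, `str`, and `dir`) to the lazily built object. A minimal sketch, assuming the module is importable; `DeferredList` is a hypothetical subclass, not part of the SDK:

```py
from groq._utils._proxy import LazyProxy


class DeferredList(LazyProxy[list]):
    def __load__(self) -> list:
        # Called whenever the proxied object is needed; note the
        # implementation shown above does not cache the result.
        return [1, 2, 3]


proxy = DeferredList()
print(proxy.count(2))  # attribute access forwards to the list -> 1
print(repr(proxy))     # repr of the proxied object -> [1, 2, 3]
```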
diff --git a/myenv/Lib/site-packages/groq/_utils/_reflection.py b/myenv/Lib/site-packages/groq/_utils/_reflection.py deleted file mode 100644 index 89aa712..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_reflection.py +++ /dev/null @@ -1,42 +0,0 @@ -from __future__ import annotations - -import inspect -from typing import Any, Callable - - -def function_has_argument(func: Callable[..., Any], arg_name: str) -> bool: - """Returns whether or not the given function has a specific parameter""" - sig = inspect.signature(func) - return arg_name in sig.parameters - - -def assert_signatures_in_sync( - source_func: Callable[..., Any], - check_func: Callable[..., Any], - *, - exclude_params: set[str] = set(), -) -> None: - """Ensure that the signature of the second function matches the first.""" - - check_sig = inspect.signature(check_func) - source_sig = inspect.signature(source_func) - - errors: list[str] = [] - - for name, source_param in source_sig.parameters.items(): - if name in exclude_params: - continue - - custom_param = check_sig.parameters.get(name) - if not custom_param: - errors.append(f"the `{name}` param is missing") - continue - - if custom_param.annotation != source_param.annotation: - errors.append( - f"types for the `{name}` param do not match; source={repr(source_param.annotation)} checking={repr(custom_param.annotation)}" - ) - continue - - if errors: - raise AssertionError(f"{len(errors)} errors encountered when comparing signatures:\n\n" + "\n\n".join(errors)) diff --git a/myenv/Lib/site-packages/groq/_utils/_streams.py b/myenv/Lib/site-packages/groq/_utils/_streams.py deleted file mode 100644 index f4a0208..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_streams.py +++ /dev/null @@ -1,12 +0,0 @@ -from typing import Any -from typing_extensions import Iterator, AsyncIterator - - -def consume_sync_iterator(iterator: Iterator[Any]) -> None: - for _ in iterator: - ... - - -async def consume_async_iterator(iterator: AsyncIterator[Any]) -> None: - async for _ in iterator: - ... diff --git a/myenv/Lib/site-packages/groq/_utils/_sync.py b/myenv/Lib/site-packages/groq/_utils/_sync.py deleted file mode 100644 index d0d8103..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_sync.py +++ /dev/null @@ -1,81 +0,0 @@ -from __future__ import annotations - -import functools -from typing import TypeVar, Callable, Awaitable -from typing_extensions import ParamSpec - -import anyio -import anyio.to_thread - -from ._reflection import function_has_argument - -T_Retval = TypeVar("T_Retval") -T_ParamSpec = ParamSpec("T_ParamSpec") - - -# copied from `asyncer`, https://github.com/tiangolo/asyncer -def asyncify( - function: Callable[T_ParamSpec, T_Retval], - *, - cancellable: bool = False, - limiter: anyio.CapacityLimiter | None = None, -) -> Callable[T_ParamSpec, Awaitable[T_Retval]]: - """ - Take a blocking function and create an async one that receives the same - positional and keyword arguments, and that when called, calls the original function - in a worker thread using `anyio.to_thread.run_sync()`. Internally, - `asyncer.asyncify()` uses the same `anyio.to_thread.run_sync()`, but it supports - keyword arguments in addition to positional arguments and it adds better support for - autocompletion and inline errors for the arguments of the function called and the - return value. - - If the `cancellable` option is enabled and the task waiting for its completion is - cancelled, the thread will still run its course but its return value (or any raised - exception) will be ignored.
 - - Use it like this: - - ```Python - def do_work(arg1, arg2, kwarg1="", kwarg2="") -> str: - # Do work - return "Some result" - - - result = await asyncify(do_work)("spam", "ham", kwarg1="a", kwarg2="b") - print(result) - ``` - - ## Arguments - - `function`: a blocking regular callable (e.g. a function) - `cancellable`: `True` to allow cancellation of the operation - `limiter`: capacity limiter to use to limit the total amount of threads running - (if omitted, the default limiter is used) - - ## Return - - An async function that takes the same positional and keyword arguments as the - original one, that when called runs the same original function in a thread worker - and returns the result. - """ - - async def wrapper(*args: T_ParamSpec.args, **kwargs: T_ParamSpec.kwargs) -> T_Retval: - partial_f = functools.partial(function, *args, **kwargs) - - # In `v4.1.0` anyio added the `abandon_on_cancel` argument and deprecated the old - # `cancellable` argument, so we need to use the new `abandon_on_cancel` to avoid - # surfacing deprecation warnings. - if function_has_argument(anyio.to_thread.run_sync, "abandon_on_cancel"): - return await anyio.to_thread.run_sync( - partial_f, - abandon_on_cancel=cancellable, - limiter=limiter, - ) - - return await anyio.to_thread.run_sync( - partial_f, - cancellable=cancellable, - limiter=limiter, - ) - - return wrapper diff --git a/myenv/Lib/site-packages/groq/_utils/_transform.py b/myenv/Lib/site-packages/groq/_utils/_transform.py deleted file mode 100644 index 47e262a..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_transform.py +++ /dev/null @@ -1,382 +0,0 @@ -from __future__ import annotations - -import io -import base64 -import pathlib -from typing import Any, Mapping, TypeVar, cast -from datetime import date, datetime -from typing_extensions import Literal, get_args, override, get_type_hints - -import anyio -import pydantic - -from ._utils import ( - is_list, - is_mapping, - is_iterable, -) -from .._files import is_base64_file_input -from ._typing import ( - is_list_type, - is_union_type, - extract_type_arg, - is_iterable_type, - is_required_type, - is_annotated_type, - strip_annotated_type, -) -from .._compat import model_dump, is_typeddict - -_T = TypeVar("_T") - - -# TODO: support for drilling globals() and locals() -# TODO: ensure works correctly with forward references in all cases - - -PropertyFormat = Literal["iso8601", "base64", "custom"] - - -class PropertyInfo: - """Metadata class to be used in Annotated types to provide information about a given type. - - For example: - - class MyParams(TypedDict): - account_holder_name: Annotated[str, PropertyInfo(alias='accountHolderName')] - - This means that {'account_holder_name': 'Robert'} will be transformed to {'accountHolderName': 'Robert'} before being sent to the API.
- """ - - alias: str | None - format: PropertyFormat | None - format_template: str | None - discriminator: str | None - - def __init__( - self, - *, - alias: str | None = None, - format: PropertyFormat | None = None, - format_template: str | None = None, - discriminator: str | None = None, - ) -> None: - self.alias = alias - self.format = format - self.format_template = format_template - self.discriminator = discriminator - - @override - def __repr__(self) -> str: - return f"{self.__class__.__name__}(alias='{self.alias}', format={self.format}, format_template='{self.format_template}', discriminator='{self.discriminator}')" - - -def maybe_transform( - data: object, - expected_type: object, -) -> Any | None: - """Wrapper over `transform()` that allows `None` to be passed. - - See `transform()` for more details. - """ - if data is None: - return None - return transform(data, expected_type) - - -# Wrapper over _transform_recursive providing fake types -def transform( - data: _T, - expected_type: object, -) -> _T: - """Transform dictionaries based off of type information from the given type, for example: - - ```py - class Params(TypedDict, total=False): - card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] - - - transformed = transform({"card_id": ""}, Params) - # {'cardID': ''} - ``` - - Any keys / data that does not have type information given will be included as is. - - It should be noted that the transformations that this function does are not represented in the type system. - """ - transformed = _transform_recursive(data, annotation=cast(type, expected_type)) - return cast(_T, transformed) - - -def _get_annotated_type(type_: type) -> type | None: - """If the given type is an `Annotated` type then it is returned, if not `None` is returned. - - This also unwraps the type when applicable, e.g. `Required[Annotated[T, ...]]` - """ - if is_required_type(type_): - # Unwrap `Required[Annotated[T, ...]]` to `Annotated[T, ...]` - type_ = get_args(type_)[0] - - if is_annotated_type(type_): - return type_ - - return None - - -def _maybe_transform_key(key: str, type_: type) -> str: - """Transform the given `data` based on the annotations provided in `type_`. - - Note: this function only looks at `Annotated` types that contain `PropertInfo` metadata. - """ - annotated_type = _get_annotated_type(type_) - if annotated_type is None: - # no `Annotated` definition for this type, no transformation needed - return key - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.alias is not None: - return annotation.alias - - return key - - -def _transform_recursive( - data: object, - *, - annotation: type, - inner_type: type | None = None, -) -> object: - """Transform the given data against the expected type. - - Args: - annotation: The direct type annotation given to the particular piece of data. - This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc - - inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type - is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in - the list can be transformed using the metadata from the container type. - - Defaults to the same value as the `annotation` argument. 
- """ - if inner_type is None: - inner_type = annotation - - stripped_type = strip_annotated_type(inner_type) - if is_typeddict(stripped_type) and is_mapping(data): - return _transform_typeddict(data, stripped_type) - - if ( - # List[T] - (is_list_type(stripped_type) and is_list(data)) - # Iterable[T] - or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) - ): - inner_type = extract_type_arg(stripped_type, 0) - return [_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] - - if is_union_type(stripped_type): - # For union types we run the transformation against all subtypes to ensure that everything is transformed. - # - # TODO: there may be edge cases where the same normalized field name will transform to two different names - # in different subtypes. - for subtype in get_args(stripped_type): - data = _transform_recursive(data, annotation=annotation, inner_type=subtype) - return data - - if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) - - annotated_type = _get_annotated_type(annotation) - if annotated_type is None: - return data - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.format is not None: - return _format_data(data, annotation.format, annotation.format_template) - - return data - - -def _format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: - if isinstance(data, (date, datetime)): - if format_ == "iso8601": - return data.isoformat() - - if format_ == "custom" and format_template is not None: - return data.strftime(format_template) - - if format_ == "base64" and is_base64_file_input(data): - binary: str | bytes | None = None - - if isinstance(data, pathlib.Path): - binary = data.read_bytes() - elif isinstance(data, io.IOBase): - binary = data.read() - - if isinstance(binary, str): # type: ignore[unreachable] - binary = binary.encode() - - if not isinstance(binary, bytes): - raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") - - return base64.b64encode(binary).decode("ascii") - - return data - - -def _transform_typeddict( - data: Mapping[str, object], - expected_type: type, -) -> Mapping[str, object]: - result: dict[str, object] = {} - annotations = get_type_hints(expected_type, include_extras=True) - for key, value in data.items(): - type_ = annotations.get(key) - if type_ is None: - # we do not have a type annotation for this field, leave it as is - result[key] = value - else: - result[_maybe_transform_key(key, type_)] = _transform_recursive(value, annotation=type_) - return result - - -async def async_maybe_transform( - data: object, - expected_type: object, -) -> Any | None: - """Wrapper over `async_transform()` that allows `None` to be passed. - - See `async_transform()` for more details. - """ - if data is None: - return None - return await async_transform(data, expected_type) - - -async def async_transform( - data: _T, - expected_type: object, -) -> _T: - """Transform dictionaries based off of type information from the given type, for example: - - ```py - class Params(TypedDict, total=False): - card_id: Required[Annotated[str, PropertyInfo(alias="cardID")]] - - - transformed = transform({"card_id": ""}, Params) - # {'cardID': ''} - ``` - - Any keys / data that does not have type information given will be included as is. 
- - It should be noted that the transformations that this function does are not represented in the type system. - """ - transformed = await _async_transform_recursive(data, annotation=cast(type, expected_type)) - return cast(_T, transformed) - - -async def _async_transform_recursive( - data: object, - *, - annotation: type, - inner_type: type | None = None, -) -> object: - """Transform the given data against the expected type. - - Args: - annotation: The direct type annotation given to the particular piece of data. - This may or may not be wrapped in metadata types, e.g. `Required[T]`, `Annotated[T, ...]` etc - - inner_type: If applicable, this is the "inside" type. This is useful in certain cases where the outside type - is a container type such as `List[T]`. In that case `inner_type` should be set to `T` so that each entry in - the list can be transformed using the metadata from the container type. - - Defaults to the same value as the `annotation` argument. - """ - if inner_type is None: - inner_type = annotation - - stripped_type = strip_annotated_type(inner_type) - if is_typeddict(stripped_type) and is_mapping(data): - return await _async_transform_typeddict(data, stripped_type) - - if ( - # List[T] - (is_list_type(stripped_type) and is_list(data)) - # Iterable[T] - or (is_iterable_type(stripped_type) and is_iterable(data) and not isinstance(data, str)) - ): - inner_type = extract_type_arg(stripped_type, 0) - return [await _async_transform_recursive(d, annotation=annotation, inner_type=inner_type) for d in data] - - if is_union_type(stripped_type): - # For union types we run the transformation against all subtypes to ensure that everything is transformed. - # - # TODO: there may be edge cases where the same normalized field name will transform to two different names - # in different subtypes. 
- for subtype in get_args(stripped_type): - data = await _async_transform_recursive(data, annotation=annotation, inner_type=subtype) - return data - - if isinstance(data, pydantic.BaseModel): - return model_dump(data, exclude_unset=True) - - annotated_type = _get_annotated_type(annotation) - if annotated_type is None: - return data - - # ignore the first argument as it is the actual type - annotations = get_args(annotated_type)[1:] - for annotation in annotations: - if isinstance(annotation, PropertyInfo) and annotation.format is not None: - return await _async_format_data(data, annotation.format, annotation.format_template) - - return data - - -async def _async_format_data(data: object, format_: PropertyFormat, format_template: str | None) -> object: - if isinstance(data, (date, datetime)): - if format_ == "iso8601": - return data.isoformat() - - if format_ == "custom" and format_template is not None: - return data.strftime(format_template) - - if format_ == "base64" and is_base64_file_input(data): - binary: str | bytes | None = None - - if isinstance(data, pathlib.Path): - binary = await anyio.Path(data).read_bytes() - elif isinstance(data, io.IOBase): - binary = data.read() - - if isinstance(binary, str): # type: ignore[unreachable] - binary = binary.encode() - - if not isinstance(binary, bytes): - raise RuntimeError(f"Could not read bytes from {data}; Received {type(binary)}") - - return base64.b64encode(binary).decode("ascii") - - return data - - -async def _async_transform_typeddict( - data: Mapping[str, object], - expected_type: type, -) -> Mapping[str, object]: - result: dict[str, object] = {} - annotations = get_type_hints(expected_type, include_extras=True) - for key, value in data.items(): - type_ = annotations.get(key) - if type_ is None: - # we do not have a type annotation for this field, leave it as is - result[key] = value - else: - result[_maybe_transform_key(key, type_)] = await _async_transform_recursive(value, annotation=type_) - return result diff --git a/myenv/Lib/site-packages/groq/_utils/_typing.py b/myenv/Lib/site-packages/groq/_utils/_typing.py deleted file mode 100644 index c036991..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_typing.py +++ /dev/null @@ -1,120 +0,0 @@ -from __future__ import annotations - -from typing import Any, TypeVar, Iterable, cast -from collections import abc as _c_abc -from typing_extensions import Required, Annotated, get_args, get_origin - -from .._types import InheritsGeneric -from .._compat import is_union as _is_union - - -def is_annotated_type(typ: type) -> bool: - return get_origin(typ) == Annotated - - -def is_list_type(typ: type) -> bool: - return (get_origin(typ) or typ) == list - - -def is_iterable_type(typ: type) -> bool: - """If the given type is `typing.Iterable[T]`""" - origin = get_origin(typ) or typ - return origin == Iterable or origin == _c_abc.Iterable - - -def is_union_type(typ: type) -> bool: - return _is_union(get_origin(typ)) - - -def is_required_type(typ: type) -> bool: - return get_origin(typ) == Required - - -def is_typevar(typ: type) -> bool: - # type ignore is required because type checkers - # think this expression will always return False - return type(typ) == TypeVar # type: ignore - - -# Extracts T from Annotated[T, ...] 
or from Required[Annotated[T, ...]] -def strip_annotated_type(typ: type) -> type: - if is_required_type(typ) or is_annotated_type(typ): - return strip_annotated_type(cast(type, get_args(typ)[0])) - - return typ - - -def extract_type_arg(typ: type, index: int) -> type: - args = get_args(typ) - try: - return cast(type, args[index]) - except IndexError as err: - raise RuntimeError(f"Expected type {typ} to have a type argument at index {index} but it did not") from err - - -def extract_type_var_from_base( - typ: type, - *, - generic_bases: tuple[type, ...], - index: int, - failure_message: str | None = None, -) -> type: - """Given a type like `Foo[T]`, returns the generic type variable `T`. - - This also handles the case where a concrete subclass is given, e.g. - ```py - class MyResponse(Foo[bytes]): - ... - - extract_type_var(MyResponse, bases=(Foo,), index=0) -> bytes - ``` - - And where a generic subclass is given: - ```py - _T = TypeVar('_T') - class MyResponse(Foo[_T]): - ... - - extract_type_var(MyResponse[bytes], bases=(Foo,), index=0) -> bytes - ``` - """ - cls = cast(object, get_origin(typ) or typ) - if cls in generic_bases: - # we're given the class directly - return extract_type_arg(typ, index) - - # if a subclass is given - # --- - # this is needed as __orig_bases__ is not present in the typeshed stubs - # because it is intended to be for internal use only, however there does - # not seem to be a way to resolve generic TypeVars for inherited subclasses - # without using it. - if isinstance(cls, InheritsGeneric): - target_base_class: Any | None = None - for base in cls.__orig_bases__: - if base.__origin__ in generic_bases: - target_base_class = base - break - - if target_base_class is None: - raise RuntimeError( - "Could not find the generic base class;\n" - "This should never happen;\n" - f"Does {cls} inherit from one of {generic_bases} ?" - ) - - extracted = extract_type_arg(target_base_class, index) - if is_typevar(extracted): - # If the extracted type argument is itself a type variable - # then that means the subclass itself is generic, so we have - # to resolve the type argument from the class itself, not - # the base class. - # - # Note: if there is more than 1 type argument, the subclass could - # change the ordering of the type arguments, this is not currently - # supported. 
- return extract_type_arg(typ, index) - - return extracted - - raise RuntimeError(failure_message or f"Could not resolve inner type variable at index {index} for {typ}") diff --git a/myenv/Lib/site-packages/groq/_utils/_utils.py b/myenv/Lib/site-packages/groq/_utils/_utils.py deleted file mode 100644 index 2fc5a1c..0000000 --- a/myenv/Lib/site-packages/groq/_utils/_utils.py +++ /dev/null @@ -1,396 +0,0 @@ -from __future__ import annotations - -import os -import re -import inspect -import functools -from typing import ( - Any, - Tuple, - Mapping, - TypeVar, - Callable, - Iterable, - Sequence, - cast, - overload, -) -from pathlib import Path -from typing_extensions import TypeGuard - -import sniffio - -from .._types import NotGiven, FileTypes, NotGivenOr, HeadersLike -from .._compat import parse_date as parse_date, parse_datetime as parse_datetime - -_T = TypeVar("_T") -_TupleT = TypeVar("_TupleT", bound=Tuple[object, ...]) -_MappingT = TypeVar("_MappingT", bound=Mapping[str, object]) -_SequenceT = TypeVar("_SequenceT", bound=Sequence[object]) -CallableT = TypeVar("CallableT", bound=Callable[..., Any]) - - -def flatten(t: Iterable[Iterable[_T]]) -> list[_T]: - return [item for sublist in t for item in sublist] - - -def extract_files( - # TODO: this needs to take Dict but variance issues..... - # create protocol type ? - query: Mapping[str, object], - *, - paths: Sequence[Sequence[str]], -) -> list[tuple[str, FileTypes]]: - """Recursively extract files from the given dictionary based on specified paths. - - A path may look like this ['foo', 'files', '', 'data']. - - Note: this mutates the given dictionary. - """ - files: list[tuple[str, FileTypes]] = [] - for path in paths: - files.extend(_extract_items(query, path, index=0, flattened_key=None)) - return files - - -def _extract_items( - obj: object, - path: Sequence[str], - *, - index: int, - flattened_key: str | None, -) -> list[tuple[str, FileTypes]]: - try: - key = path[index] - except IndexError: - if isinstance(obj, NotGiven): - # no value was provided - we can safely ignore - return [] - - # cyclical import - from .._files import assert_is_file_content - - # We have exhausted the path, return the entry we found. - assert_is_file_content(obj, key=flattened_key) - assert flattened_key is not None - return [(flattened_key, cast(FileTypes, obj))] - - index += 1 - if is_dict(obj): - try: - # We are at the last entry in the path so we must remove the field - if (len(path)) == index: - item = obj.pop(key) - else: - item = obj[key] - except KeyError: - # Key was not present in the dictionary, this is not indicative of an error - # as the given path may not point to a required field. We also do not want - # to enforce required fields as the API may differ from the spec in some cases. - return [] - if flattened_key is None: - flattened_key = key - else: - flattened_key += f"[{key}]" - return _extract_items( - item, - path, - index=index, - flattened_key=flattened_key, - ) - elif is_list(obj): - if key != "": - return [] - - return flatten( - [ - _extract_items( - item, - path, - index=index, - flattened_key=flattened_key + "[]" if flattened_key is not None else "[]", - ) - for item in obj - ] - ) - - # Something unexpected was passed, just ignore it. - return [] - - -def is_given(obj: NotGivenOr[_T]) -> TypeGuard[_T]: - return not isinstance(obj, NotGiven) - - -# Type safe methods for narrowing types with TypeVars. 
-# The default narrowing for isinstance(obj, dict) is dict[unknown, unknown], -# however this causes Pyright to rightfully report errors. As we know we don't -# care about the contained types we can safely use `object` in its place. -# -# There are two separate functions defined, `is_*` and `is_*_t` for different use cases. -# `is_*` is for when you're dealing with an unknown input -# `is_*_t` is for when you're narrowing a known union type to a specific subset - - -def is_tuple(obj: object) -> TypeGuard[tuple[object, ...]]: - return isinstance(obj, tuple) - - -def is_tuple_t(obj: _TupleT | object) -> TypeGuard[_TupleT]: - return isinstance(obj, tuple) - - -def is_sequence(obj: object) -> TypeGuard[Sequence[object]]: - return isinstance(obj, Sequence) - - -def is_sequence_t(obj: _SequenceT | object) -> TypeGuard[_SequenceT]: - return isinstance(obj, Sequence) - - -def is_mapping(obj: object) -> TypeGuard[Mapping[str, object]]: - return isinstance(obj, Mapping) - - -def is_mapping_t(obj: _MappingT | object) -> TypeGuard[_MappingT]: - return isinstance(obj, Mapping) - - -def is_dict(obj: object) -> TypeGuard[dict[object, object]]: - return isinstance(obj, dict) - - -def is_list(obj: object) -> TypeGuard[list[object]]: - return isinstance(obj, list) - - -def is_iterable(obj: object) -> TypeGuard[Iterable[object]]: - return isinstance(obj, Iterable) - - -def deepcopy_minimal(item: _T) -> _T: - """Minimal reimplementation of copy.deepcopy() that will only copy certain object types: - - - mappings, e.g. `dict` - - list - - This is done for performance reasons. - """ - if is_mapping(item): - return cast(_T, {k: deepcopy_minimal(v) for k, v in item.items()}) - if is_list(item): - return cast(_T, [deepcopy_minimal(entry) for entry in item]) - return item - - -# copied from https://github.com/Rapptz/RoboDanny -def human_join(seq: Sequence[str], *, delim: str = ", ", final: str = "or") -> str: - size = len(seq) - if size == 0: - return "" - - if size == 1: - return seq[0] - - if size == 2: - return f"{seq[0]} {final} {seq[1]}" - - return delim.join(seq[:-1]) + f" {final} {seq[-1]}" - - -def quote(string: str) -> str: - """Add single quotation marks around the given string. Does *not* do any escaping.""" - return f"'{string}'" - - -def required_args(*variants: Sequence[str]) -> Callable[[CallableT], CallableT]: - """Decorator to enforce a given set of arguments or variants of arguments are passed to the decorated function. - - Useful for enforcing runtime validation of overloaded functions. - - Example usage: - ```py - @overload - def foo(*, a: str) -> str: ... - - - @overload - def foo(*, b: bool) -> str: ... - - - # This enforces the same constraints that a static type checker would - # i.e. that either a or b must be passed to the function - @required_args(["a"], ["b"]) - def foo(*, a: str | None = None, b: bool | None = None) -> str: ...
- ``` - """ - - def inner(func: CallableT) -> CallableT: - params = inspect.signature(func).parameters - positional = [ - name - for name, param in params.items() - if param.kind - in { - param.POSITIONAL_ONLY, - param.POSITIONAL_OR_KEYWORD, - } - ] - - @functools.wraps(func) - def wrapper(*args: object, **kwargs: object) -> object: - given_params: set[str] = set() - for i, _ in enumerate(args): - try: - given_params.add(positional[i]) - except IndexError: - raise TypeError( - f"{func.__name__}() takes {len(positional)} argument(s) but {len(args)} were given" - ) from None - - for key in kwargs.keys(): - given_params.add(key) - - for variant in variants: - matches = all((param in given_params for param in variant)) - if matches: - break - else: # no break - if len(variants) > 1: - variations = human_join( - ["(" + human_join([quote(arg) for arg in variant], final="and") + ")" for variant in variants] - ) - msg = f"Missing required arguments; Expected either {variations} arguments to be given" - else: - assert len(variants) > 0 - - # TODO: this error message is not deterministic - missing = list(set(variants[0]) - given_params) - if len(missing) > 1: - msg = f"Missing required arguments: {human_join([quote(arg) for arg in missing])}" - else: - msg = f"Missing required argument: {quote(missing[0])}" - raise TypeError(msg) - return func(*args, **kwargs) - - return wrapper # type: ignore - - return inner - - -_K = TypeVar("_K") -_V = TypeVar("_V") - - -@overload -def strip_not_given(obj: None) -> None: ... - - -@overload -def strip_not_given(obj: Mapping[_K, _V | NotGiven]) -> dict[_K, _V]: ... - - -@overload -def strip_not_given(obj: object) -> object: ... - - -def strip_not_given(obj: object | None) -> object: - """Remove all top-level keys where their values are instances of `NotGiven`""" - if obj is None: - return None - - if not is_mapping(obj): - return obj - - return {key: value for key, value in obj.items() if not isinstance(value, NotGiven)} - - -def coerce_integer(val: str) -> int: - return int(val, base=10) - - -def coerce_float(val: str) -> float: - return float(val) - - -def coerce_boolean(val: str) -> bool: - return val == "true" or val == "1" or val == "on" - - -def maybe_coerce_integer(val: str | None) -> int | None: - if val is None: - return None - return coerce_integer(val) - - -def maybe_coerce_float(val: str | None) -> float | None: - if val is None: - return None - return coerce_float(val) - - -def maybe_coerce_boolean(val: str | None) -> bool | None: - if val is None: - return None - return coerce_boolean(val) - - -def removeprefix(string: str, prefix: str) -> str: - """Remove a prefix from a string. - - Backport of `str.removeprefix` for Python < 3.9 - """ - if string.startswith(prefix): - return string[len(prefix) :] - return string - - -def removesuffix(string: str, suffix: str) -> str: - """Remove a suffix from a string. 
- - Backport of `str.removesuffix` for Python < 3.9 - """ - if string.endswith(suffix): - return string[: -len(suffix)] - return string - - -def file_from_path(path: str) -> FileTypes: - contents = Path(path).read_bytes() - file_name = os.path.basename(path) - return (file_name, contents) - - -def get_required_header(headers: HeadersLike, header: str) -> str: - lower_header = header.lower() - if isinstance(headers, Mapping): - for k, v in headers.items(): - if k.lower() == lower_header and isinstance(v, str): - return v - - """ to deal with the case where the header looks like Stainless-Event-Id """ - intercaps_header = re.sub(r"([^\w])(\w)", lambda pat: pat.group(1) + pat.group(2).upper(), header.capitalize()) - - for normalized_header in [header, lower_header, header.upper(), intercaps_header]: - value = headers.get(normalized_header) - if value: - return value - - raise ValueError(f"Could not find {header} header") - - -def get_async_library() -> str: - try: - return sniffio.current_async_library() - except Exception: - return "false" - - -def lru_cache(*, maxsize: int | None = 128) -> Callable[[CallableT], CallableT]: - """A version of functools.lru_cache that retains the type signature - for the wrapped function arguments. - """ - wrapper = functools.lru_cache( # noqa: TID251 - maxsize=maxsize, - ) - return cast(Any, wrapper) # type: ignore[no-any-return] diff --git a/myenv/Lib/site-packages/groq/_version.py b/myenv/Lib/site-packages/groq/_version.py deleted file mode 100644 index 653c26f..0000000 --- a/myenv/Lib/site-packages/groq/_version.py +++ /dev/null @@ -1,4 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -__title__ = "groq" -__version__ = "0.11.0" # x-release-please-version diff --git a/myenv/Lib/site-packages/groq/lib/.keep b/myenv/Lib/site-packages/groq/lib/.keep deleted file mode 100644 index 5e2c99f..0000000 --- a/myenv/Lib/site-packages/groq/lib/.keep +++ /dev/null @@ -1,4 +0,0 @@ -File generated from our OpenAPI spec by Stainless. - -This directory can be used to store custom files to expand the SDK. -It is ignored by Stainless code generation and its content (other than this keep file) won't be touched. \ No newline at end of file diff --git a/myenv/Lib/site-packages/groq/py.typed b/myenv/Lib/site-packages/groq/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/groq/resources/__init__.py b/myenv/Lib/site-packages/groq/resources/__init__.py deleted file mode 100644 index 4f29788..0000000 --- a/myenv/Lib/site-packages/groq/resources/__init__.py +++ /dev/null @@ -1,61 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
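An aside before the resource modules below: the private helpers deleted above are terse, and their behavior is easiest to pin down with a few examples. This is an illustrative sketch only; it assumes the vendored `groq._utils` and `groq._types` modules still re-export these names, which are private and not a stable API.

```py
# Illustration only: expected behavior of the private helpers in the deleted
# groq/_utils/_utils.py. Private imports like these may break across versions.
from groq._types import NOT_GIVEN
from groq._utils import coerce_boolean, human_join, removeprefix, strip_not_given

assert human_join(["read", "write", "admin"]) == "read, write or admin"
assert removeprefix("whisper-large-v3", "whisper-") == "large-v3"
assert coerce_boolean("on") is True     # "true", "1" and "on" coerce to True
assert coerce_boolean("True") is False  # the comparison is case-sensitive

# strip_not_given drops only top-level NotGiven values from a mapping.
params = {"model": "llama3-8b-8192", "seed": NOT_GIVEN}
assert strip_not_given(params) == {"model": "llama3-8b-8192"}
```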
- -from .chat import ( - Chat, - AsyncChat, - ChatWithRawResponse, - AsyncChatWithRawResponse, - ChatWithStreamingResponse, - AsyncChatWithStreamingResponse, -) -from .audio import ( - Audio, - AsyncAudio, - AudioWithRawResponse, - AsyncAudioWithRawResponse, - AudioWithStreamingResponse, - AsyncAudioWithStreamingResponse, -) -from .models import ( - Models, - AsyncModels, - ModelsWithRawResponse, - AsyncModelsWithRawResponse, - ModelsWithStreamingResponse, - AsyncModelsWithStreamingResponse, -) -from .embeddings import ( - Embeddings, - AsyncEmbeddings, - EmbeddingsWithRawResponse, - AsyncEmbeddingsWithRawResponse, - EmbeddingsWithStreamingResponse, - AsyncEmbeddingsWithStreamingResponse, -) - -__all__ = [ - "Chat", - "AsyncChat", - "ChatWithRawResponse", - "AsyncChatWithRawResponse", - "ChatWithStreamingResponse", - "AsyncChatWithStreamingResponse", - "Embeddings", - "AsyncEmbeddings", - "EmbeddingsWithRawResponse", - "AsyncEmbeddingsWithRawResponse", - "EmbeddingsWithStreamingResponse", - "AsyncEmbeddingsWithStreamingResponse", - "Audio", - "AsyncAudio", - "AudioWithRawResponse", - "AsyncAudioWithRawResponse", - "AudioWithStreamingResponse", - "AsyncAudioWithStreamingResponse", - "Models", - "AsyncModels", - "ModelsWithRawResponse", - "AsyncModelsWithRawResponse", - "ModelsWithStreamingResponse", - "AsyncModelsWithStreamingResponse", -] diff --git a/myenv/Lib/site-packages/groq/resources/audio/__init__.py b/myenv/Lib/site-packages/groq/resources/audio/__init__.py deleted file mode 100644 index 40e9ab2..0000000 --- a/myenv/Lib/site-packages/groq/resources/audio/__init__.py +++ /dev/null @@ -1,47 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from .audio import ( - Audio, - AsyncAudio, - AudioWithRawResponse, - AsyncAudioWithRawResponse, - AudioWithStreamingResponse, - AsyncAudioWithStreamingResponse, -) -from .translations import ( - Translations, - AsyncTranslations, - TranslationsWithRawResponse, - AsyncTranslationsWithRawResponse, - TranslationsWithStreamingResponse, - AsyncTranslationsWithStreamingResponse, -) -from .transcriptions import ( - Transcriptions, - AsyncTranscriptions, - TranscriptionsWithRawResponse, - AsyncTranscriptionsWithRawResponse, - TranscriptionsWithStreamingResponse, - AsyncTranscriptionsWithStreamingResponse, -) - -__all__ = [ - "Transcriptions", - "AsyncTranscriptions", - "TranscriptionsWithRawResponse", - "AsyncTranscriptionsWithRawResponse", - "TranscriptionsWithStreamingResponse", - "AsyncTranscriptionsWithStreamingResponse", - "Translations", - "AsyncTranslations", - "TranslationsWithRawResponse", - "AsyncTranslationsWithRawResponse", - "TranslationsWithStreamingResponse", - "AsyncTranslationsWithStreamingResponse", - "Audio", - "AsyncAudio", - "AudioWithRawResponse", - "AsyncAudioWithRawResponse", - "AudioWithStreamingResponse", - "AsyncAudioWithStreamingResponse", -] diff --git a/myenv/Lib/site-packages/groq/resources/audio/audio.py b/myenv/Lib/site-packages/groq/resources/audio/audio.py deleted file mode 100644 index 728ba29..0000000 --- a/myenv/Lib/site-packages/groq/resources/audio/audio.py +++ /dev/null @@ -1,112 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from .translations import ( - Translations, - AsyncTranslations, - TranslationsWithRawResponse, - AsyncTranslationsWithRawResponse, - TranslationsWithStreamingResponse, - AsyncTranslationsWithStreamingResponse, -) -from .transcriptions import ( - Transcriptions, - AsyncTranscriptions, - TranscriptionsWithRawResponse, - AsyncTranscriptionsWithRawResponse, - TranscriptionsWithStreamingResponse, - AsyncTranscriptionsWithStreamingResponse, -) - -__all__ = ["Audio", "AsyncAudio"] - - -class Audio(SyncAPIResource): - @cached_property - def transcriptions(self) -> Transcriptions: - return Transcriptions(self._client) - - @cached_property - def translations(self) -> Translations: - return Translations(self._client) - - @cached_property - def with_raw_response(self) -> AudioWithRawResponse: - return AudioWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AudioWithStreamingResponse: - return AudioWithStreamingResponse(self) - - -class AsyncAudio(AsyncAPIResource): - @cached_property - def transcriptions(self) -> AsyncTranscriptions: - return AsyncTranscriptions(self._client) - - @cached_property - def translations(self) -> AsyncTranslations: - return AsyncTranslations(self._client) - - @cached_property - def with_raw_response(self) -> AsyncAudioWithRawResponse: - return AsyncAudioWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncAudioWithStreamingResponse: - return AsyncAudioWithStreamingResponse(self) - - -class AudioWithRawResponse: - def __init__(self, audio: Audio) -> None: - self._audio = audio - - @cached_property - def transcriptions(self) -> TranscriptionsWithRawResponse: - return TranscriptionsWithRawResponse(self._audio.transcriptions) - - @cached_property - def translations(self) -> TranslationsWithRawResponse: - return TranslationsWithRawResponse(self._audio.translations) - - -class AsyncAudioWithRawResponse: - def __init__(self, audio: AsyncAudio) -> None: - self._audio = audio - - @cached_property - def transcriptions(self) -> AsyncTranscriptionsWithRawResponse: - return AsyncTranscriptionsWithRawResponse(self._audio.transcriptions) - - @cached_property - def translations(self) -> AsyncTranslationsWithRawResponse: - return AsyncTranslationsWithRawResponse(self._audio.translations) - - -class AudioWithStreamingResponse: - def __init__(self, audio: Audio) -> None: - self._audio = audio - - @cached_property - def transcriptions(self) -> TranscriptionsWithStreamingResponse: - return TranscriptionsWithStreamingResponse(self._audio.transcriptions) - - @cached_property - def translations(self) -> TranslationsWithStreamingResponse: - return TranslationsWithStreamingResponse(self._audio.translations) - - -class AsyncAudioWithStreamingResponse: - def __init__(self, audio: AsyncAudio) -> None: - self._audio = audio - - @cached_property - def transcriptions(self) -> AsyncTranscriptionsWithStreamingResponse: - return AsyncTranscriptionsWithStreamingResponse(self._audio.transcriptions) - - @cached_property - def translations(self) -> AsyncTranslationsWithStreamingResponse: - return AsyncTranslationsWithStreamingResponse(self._audio.translations) diff --git a/myenv/Lib/site-packages/groq/resources/audio/transcriptions.py b/myenv/Lib/site-packages/groq/resources/audio/transcriptions.py deleted file mode 100644 index f39363c..0000000 --- 
a/myenv/Lib/site-packages/groq/resources/audio/transcriptions.py +++ /dev/null @@ -1,461 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List, Union, Mapping, cast -from typing_extensions import Literal - -import httpx - -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes -from ..._utils import ( - extract_files, - maybe_transform, - deepcopy_minimal, - async_maybe_transform, -) -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from ..._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ...types.audio import transcription_create_params -from ..._base_client import make_request_options -from ...types.audio.transcription import Transcription - -__all__ = ["Transcriptions", "AsyncTranscriptions"] - - -class Transcriptions(SyncAPIResource): - @cached_property - def with_raw_response(self) -> TranscriptionsWithRawResponse: - return TranscriptionsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> TranscriptionsWithStreamingResponse: - return TranscriptionsWithStreamingResponse(self) - - def create( - self, - *, - file: FileTypes, - model: Union[str, Literal["whisper-large-v3"]], - language: Union[ - str, - Literal[ - "en", - "zh", - "de", - "es", - "ru", - "ko", - "fr", - "ja", - "pt", - "tr", - "pl", - "ca", - "nl", - "ar", - "sv", - "it", - "id", - "hi", - "fi", - "vi", - "he", - "uk", - "el", - "ms", - "cs", - "ro", - "da", - "hu", - "ta", - "th", - "ur", - "hr", - "bg", - "lt", - "la", - "mi", - "ml", - "cy", - "sk", - "te", - "fa", - "lv", - "bn", - "sr", - "az", - "sl", - "kn", - "et", - "mk", - "br", - "eu", - "is", - "hy", - "ne", - "mn", - "bs", - "kk", - "sq", - "sw", - "gl", - "mr", - "pa", - "si", - "km", - "sn", - "yo", - "so", - "af", - "oc", - "ka", - "be", - "tg", - "sd", - "gu", - "am", - "yi", - "lo", - "uz", - "fo", - "ht", - "ps", - "tk", - "nn", - "mt", - "sa", - "lb", - "my", - "bo", - "tl", - "mg", - "as", - "tt", - "haw", - "ln", - "ha", - "ba", - "jv", - "su", - "yue", - ], - ] - | NotGiven = NOT_GIVEN, - prompt: str | NotGiven = NOT_GIVEN, - response_format: Literal["json", "text", "verbose_json"] | NotGiven = NOT_GIVEN, - temperature: float | NotGiven = NOT_GIVEN, - timestamp_granularities: List[Literal["word", "segment"]] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Transcription: - """ - Transcribes audio into the input language. - - Args: - file: - The audio file object (not file name) to transcribe, in one of these formats: - flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - - model: ID of the model to use. Only `whisper-large-v3` is currently available. - - language: The language of the input audio. Supplying the input language in - [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will - improve accuracy and latency. - - prompt: An optional text to guide the model's style or continue a previous audio - segment. 
The [prompt](/docs/guides/speech-to-text/prompting) should match the
-              audio language.
-
-          response_format: The format of the transcript output, in one of these options: `json`, `text`, or
-              `verbose_json`.
-
-          temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
-              output more random, while lower values like 0.2 will make it more focused and
-              deterministic. If set to 0, the model will use
-              [log probability](https://en.wikipedia.org/wiki/Log_probability) to
-              automatically increase the temperature until certain thresholds are hit.
-
-          timestamp_granularities: The timestamp granularities to populate for this transcription.
-              `response_format` must be set to `verbose_json` to use timestamp granularities.
-              Either or both of these options are supported: `word` or `segment`. Note: There
-              is no additional latency for segment timestamps, but generating word timestamps
-              incurs additional latency.
-
-          extra_headers: Send extra headers
-
-          extra_query: Add additional query parameters to the request
-
-          extra_body: Add additional JSON properties to the request
-
-          timeout: Override the client-level default timeout for this request, in seconds
-        """
-        body = deepcopy_minimal(
-            {
-                "file": file,
-                "model": model,
-                "language": language,
-                "prompt": prompt,
-                "response_format": response_format,
-                "temperature": temperature,
-                "timestamp_granularities": timestamp_granularities,
-            }
-        )
-        files = extract_files(cast(Mapping[str, object], body), paths=[["file"]])
-        # It should be noted that the actual Content-Type header that will be
-        # sent to the server will contain a `boundary` parameter, e.g.
-        # multipart/form-data; boundary=---abc--
-        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
-        return self._post(
-            "/openai/v1/audio/transcriptions",
-            body=maybe_transform(body, transcription_create_params.TranscriptionCreateParams),
-            files=files,
-            options=make_request_options(
-                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
-            ),
-            cast_to=Transcription,
-        )
-
-
-class AsyncTranscriptions(AsyncAPIResource):
-    @cached_property
-    def with_raw_response(self) -> AsyncTranscriptionsWithRawResponse:
-        return AsyncTranscriptionsWithRawResponse(self)
-
-    @cached_property
-    def with_streaming_response(self) -> AsyncTranscriptionsWithStreamingResponse:
-        return AsyncTranscriptionsWithStreamingResponse(self)
-
-    async def create(
-        self,
-        *,
-        file: FileTypes,
-        model: Union[str, Literal["whisper-large-v3"]],
-        language: Union[
-            str,
-            Literal[
-                "en",
-                "zh",
-                "de",
-                "es",
-                "ru",
-                "ko",
-                "fr",
-                "ja",
-                "pt",
-                "tr",
-                "pl",
-                "ca",
-                "nl",
-                "ar",
-                "sv",
-                "it",
-                "id",
-                "hi",
-                "fi",
-                "vi",
-                "he",
-                "uk",
-                "el",
-                "ms",
-                "cs",
-                "ro",
-                "da",
-                "hu",
-                "ta",
-                "th",
-                "ur",
-                "hr",
-                "bg",
-                "lt",
-                "la",
-                "mi",
-                "ml",
-                "cy",
-                "sk",
-                "te",
-                "fa",
-                "lv",
-                "bn",
-                "sr",
-                "az",
-                "sl",
-                "kn",
-                "et",
-                "mk",
-                "br",
-                "eu",
-                "is",
-                "hy",
-                "ne",
-                "mn",
-                "bs",
-                "kk",
-                "sq",
-                "sw",
-                "gl",
-                "mr",
-                "pa",
-                "si",
-                "km",
-                "sn",
-                "yo",
-                "so",
-                "af",
-                "oc",
-                "ka",
-                "be",
-                "tg",
-                "sd",
-                "gu",
-                "am",
-                "yi",
-                "lo",
-                "uz",
-                "fo",
-                "ht",
-                "ps",
-                "tk",
-                "nn",
-                "mt",
-                "sa",
-                "lb",
-                "my",
-                "bo",
-                "tl",
-                "mg",
-                "as",
-                "tt",
-                "haw",
-                "ln",
-                "ha",
-                "ba",
-                "jv",
-                "su",
-                "yue",
-            ],
-        ]
-        | NotGiven = NOT_GIVEN,
-        prompt: str | NotGiven = NOT_GIVEN,
-        response_format: Literal["json", "text", "verbose_json"] | NotGiven = NOT_GIVEN,
-        temperature: float | NotGiven = NOT_GIVEN,
-        timestamp_granularities: List[Literal["word", "segment"]] | NotGiven = NOT_GIVEN,
-        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
-        # The extra values given here take precedence over values defined on the client or passed to this method.
-        extra_headers: Headers | None = None,
-        extra_query: Query | None = None,
-        extra_body: Body | None = None,
-        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> Transcription:
-        """
-        Transcribes audio into the input language.
-
-        Args:
-          file:
-              The audio file object (not file name) to transcribe, in one of these formats:
-              flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
-
-          model: ID of the model to use. Only `whisper-large-v3` is currently available.
-
-          language: The language of the input audio. Supplying the input language in
-              [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will
-              improve accuracy and latency.
-
-          prompt: An optional text to guide the model's style or continue a previous audio
-              segment. The [prompt](/docs/guides/speech-to-text/prompting) should match the
-              audio language.
-
-          response_format: The format of the transcript output, in one of these options: `json`, `text`, or
-              `verbose_json`.
-
-          temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
-              output more random, while lower values like 0.2 will make it more focused and
-              deterministic. If set to 0, the model will use
-              [log probability](https://en.wikipedia.org/wiki/Log_probability) to
-              automatically increase the temperature until certain thresholds are hit.
-
-          timestamp_granularities: The timestamp granularities to populate for this transcription.
-              `response_format` must be set to `verbose_json` to use timestamp granularities.
-              Either or both of these options are supported: `word` or `segment`. Note: There
-              is no additional latency for segment timestamps, but generating word timestamps
-              incurs additional latency.
-
-          extra_headers: Send extra headers
-
-          extra_query: Add additional query parameters to the request
-
-          extra_body: Add additional JSON properties to the request
-
-          timeout: Override the client-level default timeout for this request, in seconds
-        """
-        body = deepcopy_minimal(
-            {
-                "file": file,
-                "model": model,
-                "language": language,
-                "prompt": prompt,
-                "response_format": response_format,
-                "temperature": temperature,
-                "timestamp_granularities": timestamp_granularities,
-            }
-        )
-        files = extract_files(cast(Mapping[str, object], body), paths=[["file"]])
-        # It should be noted that the actual Content-Type header that will be
-        # sent to the server will contain a `boundary` parameter, e.g.
- # multipart/form-data; boundary=---abc-- - extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} - return await self._post( - "/openai/v1/audio/transcriptions", - body=await async_maybe_transform(body, transcription_create_params.TranscriptionCreateParams), - files=files, - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=Transcription, - ) - - -class TranscriptionsWithRawResponse: - def __init__(self, transcriptions: Transcriptions) -> None: - self._transcriptions = transcriptions - - self.create = to_raw_response_wrapper( - transcriptions.create, - ) - - -class AsyncTranscriptionsWithRawResponse: - def __init__(self, transcriptions: AsyncTranscriptions) -> None: - self._transcriptions = transcriptions - - self.create = async_to_raw_response_wrapper( - transcriptions.create, - ) - - -class TranscriptionsWithStreamingResponse: - def __init__(self, transcriptions: Transcriptions) -> None: - self._transcriptions = transcriptions - - self.create = to_streamed_response_wrapper( - transcriptions.create, - ) - - -class AsyncTranscriptionsWithStreamingResponse: - def __init__(self, transcriptions: AsyncTranscriptions) -> None: - self._transcriptions = transcriptions - - self.create = async_to_streamed_response_wrapper( - transcriptions.create, - ) diff --git a/myenv/Lib/site-packages/groq/resources/audio/translations.py b/myenv/Lib/site-packages/groq/resources/audio/translations.py deleted file mode 100644 index 101a797..0000000 --- a/myenv/Lib/site-packages/groq/resources/audio/translations.py +++ /dev/null @@ -1,223 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Union, Mapping, cast -from typing_extensions import Literal - -import httpx - -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven, FileTypes -from ..._utils import ( - extract_files, - maybe_transform, - deepcopy_minimal, - async_maybe_transform, -) -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from ..._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ...types.audio import translation_create_params -from ..._base_client import make_request_options -from ...types.audio.translation import Translation - -__all__ = ["Translations", "AsyncTranslations"] - - -class Translations(SyncAPIResource): - @cached_property - def with_raw_response(self) -> TranslationsWithRawResponse: - return TranslationsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> TranslationsWithStreamingResponse: - return TranslationsWithStreamingResponse(self) - - def create( - self, - *, - file: FileTypes, - model: Union[str, Literal["whisper-large-v3"]], - prompt: str | NotGiven = NOT_GIVEN, - response_format: Literal["json", "text", "verbose_json"] | NotGiven = NOT_GIVEN, - temperature: float | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. 
-        extra_headers: Headers | None = None,
-        extra_query: Query | None = None,
-        extra_body: Body | None = None,
-        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> Translation:
-        """
-        Translates audio into English.
-
-        Args:
-          file: The audio file object (not file name) to translate, in one of these formats: flac,
-              mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
-
-          model: ID of the model to use. Only `whisper-large-v3` is currently available.
-
-          prompt: An optional text to guide the model's style or continue a previous audio
-              segment. The [prompt](/docs/guides/speech-to-text/prompting) should be in
-              English.
-
-          response_format: The format of the transcript output, in one of these options: `json`, `text`, or
-              `verbose_json`.
-
-          temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the
-              output more random, while lower values like 0.2 will make it more focused and
-              deterministic. If set to 0, the model will use
-              [log probability](https://en.wikipedia.org/wiki/Log_probability) to
-              automatically increase the temperature until certain thresholds are hit.
-
-          extra_headers: Send extra headers
-
-          extra_query: Add additional query parameters to the request
-
-          extra_body: Add additional JSON properties to the request
-
-          timeout: Override the client-level default timeout for this request, in seconds
-        """
-        body = deepcopy_minimal(
-            {
-                "file": file,
-                "model": model,
-                "prompt": prompt,
-                "response_format": response_format,
-                "temperature": temperature,
-            }
-        )
-        files = extract_files(cast(Mapping[str, object], body), paths=[["file"]])
-        # It should be noted that the actual Content-Type header that will be
-        # sent to the server will contain a `boundary` parameter, e.g.
-        # multipart/form-data; boundary=---abc--
-        extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})}
-        return self._post(
-            "/openai/v1/audio/translations",
-            body=maybe_transform(body, translation_create_params.TranslationCreateParams),
-            files=files,
-            options=make_request_options(
-                extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
-            ),
-            cast_to=Translation,
-        )
-
-
-class AsyncTranslations(AsyncAPIResource):
-    @cached_property
-    def with_raw_response(self) -> AsyncTranslationsWithRawResponse:
-        return AsyncTranslationsWithRawResponse(self)
-
-    @cached_property
-    def with_streaming_response(self) -> AsyncTranslationsWithStreamingResponse:
-        return AsyncTranslationsWithStreamingResponse(self)
-
-    async def create(
-        self,
-        *,
-        file: FileTypes,
-        model: Union[str, Literal["whisper-large-v3"]],
-        prompt: str | NotGiven = NOT_GIVEN,
-        response_format: Literal["json", "text", "verbose_json"] | NotGiven = NOT_GIVEN,
-        temperature: float | NotGiven = NOT_GIVEN,
-        # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
-        # The extra values given here take precedence over values defined on the client or passed to this method.
-        extra_headers: Headers | None = None,
-        extra_query: Query | None = None,
-        extra_body: Body | None = None,
-        timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
-    ) -> Translation:
-        """
-        Translates audio into English.
-
-        Args:
-          file: The audio file object (not file name) to translate, in one of these formats: flac,
-              mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm.
-
-          model: ID of the model to use. Only `whisper-large-v3` is currently available.
- - prompt: An optional text to guide the model's style or continue a previous audio - segment. The [prompt](/docs/guides/speech-to-text/prompting) should be in - English. - - response_format: The format of the transcript output, in one of these options: `json`, `text`, or - `verbose_json`. - - temperature: The sampling temperature, between 0 and 1. Higher values like 0.8 will make the - output more random, while lower values like 0.2 will make it more focused and - deterministic. If set to 0, the model will use - [log probability](https://en.wikipedia.org/wiki/Log_probability) to - automatically increase the temperature until certain thresholds are hit. - - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - body = deepcopy_minimal( - { - "file": file, - "model": model, - "prompt": prompt, - "response_format": response_format, - "temperature": temperature, - } - ) - files = extract_files(cast(Mapping[str, object], body), paths=[["file"]]) - # It should be noted that the actual Content-Type header that will be - # sent to the server will contain a `boundary` parameter, e.g. - # multipart/form-data; boundary=---abc-- - extra_headers = {"Content-Type": "multipart/form-data", **(extra_headers or {})} - return await self._post( - "/openai/v1/audio/translations", - body=await async_maybe_transform(body, translation_create_params.TranslationCreateParams), - files=files, - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=Translation, - ) - - -class TranslationsWithRawResponse: - def __init__(self, translations: Translations) -> None: - self._translations = translations - - self.create = to_raw_response_wrapper( - translations.create, - ) - - -class AsyncTranslationsWithRawResponse: - def __init__(self, translations: AsyncTranslations) -> None: - self._translations = translations - - self.create = async_to_raw_response_wrapper( - translations.create, - ) - - -class TranslationsWithStreamingResponse: - def __init__(self, translations: Translations) -> None: - self._translations = translations - - self.create = to_streamed_response_wrapper( - translations.create, - ) - - -class AsyncTranslationsWithStreamingResponse: - def __init__(self, translations: AsyncTranslations) -> None: - self._translations = translations - - self.create = async_to_streamed_response_wrapper( - translations.create, - ) diff --git a/myenv/Lib/site-packages/groq/resources/chat/__init__.py b/myenv/Lib/site-packages/groq/resources/chat/__init__.py deleted file mode 100644 index 52dfdce..0000000 --- a/myenv/Lib/site-packages/groq/resources/chat/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
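For orientation before the chat resources below: both deleted audio endpoints accept a multipart file upload and return a parsed model, with `deepcopy_minimal` and `extract_files` splitting the file out of the JSON body. A hedged usage sketch against the public client; the file path and environment variable are placeholders:

```py
# Illustration only: reaching the deleted transcription/translation resources
# through the public Groq client. "meeting.wav" and the env var are placeholders.
import os

from groq import Groq

client = Groq(api_key=os.environ["GROQ_API_KEY"])

with open("meeting.wav", "rb") as audio:
    transcript = client.audio.transcriptions.create(
        file=("meeting.wav", audio),      # (filename, file object) tuple
        model="whisper-large-v3",
        response_format="verbose_json",   # required for timestamp_granularities
        timestamp_granularities=["segment"],
    )
print(transcript.text)

with open("meeting.wav", "rb") as audio:
    translation = client.audio.translations.create(
        file=("meeting.wav", audio),
        model="whisper-large-v3",
    )
print(translation.text)  # translations always come back in English
```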
- -from .chat import ( - Chat, - AsyncChat, - ChatWithRawResponse, - AsyncChatWithRawResponse, - ChatWithStreamingResponse, - AsyncChatWithStreamingResponse, -) -from .completions import ( - Completions, - AsyncCompletions, - CompletionsWithRawResponse, - AsyncCompletionsWithRawResponse, - CompletionsWithStreamingResponse, - AsyncCompletionsWithStreamingResponse, -) - -__all__ = [ - "Completions", - "AsyncCompletions", - "CompletionsWithRawResponse", - "AsyncCompletionsWithRawResponse", - "CompletionsWithStreamingResponse", - "AsyncCompletionsWithStreamingResponse", - "Chat", - "AsyncChat", - "ChatWithRawResponse", - "AsyncChatWithRawResponse", - "ChatWithStreamingResponse", - "AsyncChatWithStreamingResponse", -] diff --git a/myenv/Lib/site-packages/groq/resources/chat/chat.py b/myenv/Lib/site-packages/groq/resources/chat/chat.py deleted file mode 100644 index d14d055..0000000 --- a/myenv/Lib/site-packages/groq/resources/chat/chat.py +++ /dev/null @@ -1,80 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from .completions import ( - Completions, - AsyncCompletions, - CompletionsWithRawResponse, - AsyncCompletionsWithRawResponse, - CompletionsWithStreamingResponse, - AsyncCompletionsWithStreamingResponse, -) - -__all__ = ["Chat", "AsyncChat"] - - -class Chat(SyncAPIResource): - @cached_property - def completions(self) -> Completions: - return Completions(self._client) - - @cached_property - def with_raw_response(self) -> ChatWithRawResponse: - return ChatWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> ChatWithStreamingResponse: - return ChatWithStreamingResponse(self) - - -class AsyncChat(AsyncAPIResource): - @cached_property - def completions(self) -> AsyncCompletions: - return AsyncCompletions(self._client) - - @cached_property - def with_raw_response(self) -> AsyncChatWithRawResponse: - return AsyncChatWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncChatWithStreamingResponse: - return AsyncChatWithStreamingResponse(self) - - -class ChatWithRawResponse: - def __init__(self, chat: Chat) -> None: - self._chat = chat - - @cached_property - def completions(self) -> CompletionsWithRawResponse: - return CompletionsWithRawResponse(self._chat.completions) - - -class AsyncChatWithRawResponse: - def __init__(self, chat: AsyncChat) -> None: - self._chat = chat - - @cached_property - def completions(self) -> AsyncCompletionsWithRawResponse: - return AsyncCompletionsWithRawResponse(self._chat.completions) - - -class ChatWithStreamingResponse: - def __init__(self, chat: Chat) -> None: - self._chat = chat - - @cached_property - def completions(self) -> CompletionsWithStreamingResponse: - return CompletionsWithStreamingResponse(self._chat.completions) - - -class AsyncChatWithStreamingResponse: - def __init__(self, chat: AsyncChat) -> None: - self._chat = chat - - @cached_property - def completions(self) -> AsyncCompletionsWithStreamingResponse: - return AsyncCompletionsWithStreamingResponse(self._chat.completions) diff --git a/myenv/Lib/site-packages/groq/resources/chat/completions.py b/myenv/Lib/site-packages/groq/resources/chat/completions.py deleted file mode 100644 index e70cff9..0000000 --- a/myenv/Lib/site-packages/groq/resources/chat/completions.py +++ /dev/null @@ -1,646 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. 
See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Dict, List, Union, Iterable, Optional, overload -from typing_extensions import Literal - -import httpx - -from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from ..._utils import ( - maybe_transform, - async_maybe_transform, -) -from ..._compat import cached_property -from ..._resource import SyncAPIResource, AsyncAPIResource -from ..._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ..._streaming import Stream, AsyncStream -from ...types.chat import completion_create_params -from ..._base_client import make_request_options -from ...types.chat.chat_completion import ChatCompletion -from ...types.chat.chat_completion_chunk import ChatCompletionChunk -from ...types.chat.chat_completion_tool_param import ChatCompletionToolParam -from ...types.chat.chat_completion_message_param import ChatCompletionMessageParam -from ...types.chat.chat_completion_tool_choice_option_param import ChatCompletionToolChoiceOptionParam - -__all__ = ["Completions", "AsyncCompletions"] - - -class Completions(SyncAPIResource): - @cached_property - def with_raw_response(self) -> CompletionsWithRawResponse: - return CompletionsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> CompletionsWithStreamingResponse: - return CompletionsWithStreamingResponse(self) - - @overload - def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: str, - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCompletion: - ... 
- - @overload - def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: str, - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: Literal[True], - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Stream[ChatCompletionChunk]: - ... - - @overload - def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: str, - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: bool, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCompletion | Stream[ChatCompletionChunk]: - ... 
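The three overloads above are the standard trick for making the return type follow the `stream` flag: `Literal[False]` (the default) maps to `ChatCompletion`, `Literal[True]` to `Stream[ChatCompletionChunk]`, and plain `bool` covers callers whose flag is only known at runtime. A stripped-down sketch of the same pattern, not the SDK's actual code:

```py
# Minimal sketch of the stream-flag overload pattern used above.
from __future__ import annotations

from typing import Iterator, Literal, Union, overload


@overload
def create(*, stream: Literal[False] = False) -> str: ...
@overload
def create(*, stream: Literal[True]) -> Iterator[str]: ...
@overload
def create(*, stream: bool) -> Union[str, Iterator[str]]: ...
def create(*, stream: bool = False) -> Union[str, Iterator[str]]:
    if stream:
        # The real method returns a Stream wrapper; a generator stands in here.
        return iter(["hel", "lo"])
    return "hello"


reply: str = create()                        # first overload selected
chunks: Iterator[str] = create(stream=True)  # second overload selected
```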
- - def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: Union[str, Literal["gemma-7b-it", "llama3-70b-8192", "llama3-8b-8192", "mixtral-8x7b-32768"]], - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCompletion | Stream[ChatCompletionChunk]: - """ - Creates a model response for the given chat conversation. - - Args: - messages: A list of messages comprising the conversation so far. - - model: ID of the model to use. For details on which models are compatible with the Chat - API, see available [models](/docs/models) - - frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their - existing frequency in the text so far, decreasing the model's likelihood to - repeat the same line verbatim. - - function_call: Deprecated in favor of `tool_choice`. - - Controls which (if any) function is called by the model. `none` means the model - will not call a function and instead generates a message. `auto` means the model - can pick between generating a message or calling a function. Specifying a - particular function via `{"name": "my_function"}` forces the model to call that - function. - - `none` is the default when no functions are present. `auto` is the default if - functions are present. - - functions: Deprecated in favor of `tools`. - - A list of functions the model may generate JSON inputs for. - - logit_bias: This is not yet supported by any of our models. Modify the likelihood of - specified tokens appearing in the completion. - - logprobs: This is not yet supported by any of our models. Whether to return log - probabilities of the output tokens or not. If true, returns the log - probabilities of each output token returned in the `content` of `message`. - - max_tokens: The maximum number of tokens that can be generated in the chat completion. The - total length of input tokens and generated tokens is limited by the model's - context length. 
-
-          n: How many chat completion choices to generate for each input message. Note that
-              at the current moment, only n=1 is supported. Other values will result in a 400
-              response.
-
-          parallel_tool_calls: Whether to enable parallel function calling during tool use.
-
-          presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on
-              whether they appear in the text so far, increasing the model's likelihood to
-              talk about new topics.
-
-          response_format: An object specifying the format that the model must output.
-
-              Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
-              message the model generates is valid JSON.
-
-              **Important:** when using JSON mode, you **must** also instruct the model to
-              produce JSON yourself via a system or user message.
-
-          seed: If specified, our system will make a best effort to sample deterministically,
-              such that repeated requests with the same `seed` and parameters should return
-              the same result. Determinism is not guaranteed, and you should refer to the
-              `system_fingerprint` response parameter to monitor changes in the backend.
-
-          stop: Up to 4 sequences where the API will stop generating further tokens. The
-              returned text will not contain the stop sequence.
-
-          stream: If set, partial message deltas will be sent. Tokens will be sent as data-only
-              [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
-              as they become available, with the stream terminated by a `data: [DONE]`
-              message. [Example code](/docs/text-chat#streaming-a-chat-completion).
-
-          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
-              make the output more random, while lower values like 0.2 will make it more
-              focused and deterministic. We generally recommend altering this or top_p but not
-              both.
-
-          tool_choice: Controls which (if any) tool is called by the model. `none` means the model will
-              not call any tool and instead generates a message. `auto` means the model can
-              pick between generating a message or calling one or more tools. `required` means
-              the model must call one or more tools. Specifying a particular tool via
-              `{"type": "function", "function": {"name": "my_function"}}` forces the model to
-              call that tool.
-
-              `none` is the default when no tools are present. `auto` is the default if tools
-              are present.
-
-          tools: A list of tools the model may call. Currently, only functions are supported as a
-              tool. Use this to provide a list of functions the model may generate JSON inputs
-              for. A maximum of 128 functions is supported.
-
-          top_logprobs: This is not yet supported by any of our models. An integer between 0 and 20
-              specifying the number of most likely tokens to return at each token position,
-              each with an associated log probability. `logprobs` must be set to `true` if
-              this parameter is used.
-
-          top_p: An alternative to sampling with temperature, called nucleus sampling, where the
-              model considers the results of the tokens with top_p probability mass. So 0.1
-              means only the tokens comprising the top 10% probability mass are considered. We
-              generally recommend altering this or temperature but not both.
-
-          user: A unique identifier representing your end-user, which can help us monitor and
-              detect abuse.
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._post( - "/openai/v1/chat/completions", - body=maybe_transform( - { - "messages": messages, - "model": model, - "frequency_penalty": frequency_penalty, - "function_call": function_call, - "functions": functions, - "logit_bias": logit_bias, - "logprobs": logprobs, - "max_tokens": max_tokens, - "n": n, - "parallel_tool_calls": parallel_tool_calls, - "presence_penalty": presence_penalty, - "response_format": response_format, - "seed": seed, - "stop": stop, - "stream": stream, - "temperature": temperature, - "tool_choice": tool_choice, - "tools": tools, - "top_logprobs": top_logprobs, - "top_p": top_p, - "user": user, - }, - completion_create_params.CompletionCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=ChatCompletion, - stream=stream or False, - stream_cls=Stream[ChatCompletionChunk], - ) - - -class AsyncCompletions(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncCompletionsWithRawResponse: - return AsyncCompletionsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncCompletionsWithStreamingResponse: - return AsyncCompletionsWithStreamingResponse(self) - - @overload - async def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: str, - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCompletion: - ... 
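Before the async overloads continue below, a hedged sketch of the synchronous `create` just implemented: both calls go to `POST /openai/v1/chat/completions`, and `stream=True` flips the return type to a chunk iterator. The model name and environment variable are placeholders:

```py
# Illustration only: non-streaming and streaming chat completion calls.
import os

from groq import Groq

client = Groq(api_key=os.environ["GROQ_API_KEY"])
messages = [{"role": "user", "content": "Say hello in one word."}]

completion = client.chat.completions.create(model="llama3-8b-8192", messages=messages)
print(completion.choices[0].message.content)

# With stream=True the same method returns Stream[ChatCompletionChunk].
for chunk in client.chat.completions.create(
    model="llama3-8b-8192", messages=messages, stream=True
):
    print(chunk.choices[0].delta.content or "", end="")
```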
- - @overload - async def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: str, - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: Literal[True], - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> AsyncStream[ChatCompletionChunk]: - ... - - @overload - async def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: str, - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: bool, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCompletion | AsyncStream[ChatCompletionChunk]: - ... 
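The async resource mirrors the sync one line for line; the only usage differences are `await` on the call and `async for` over the stream. A matching sketch under the same placeholder assumptions:

```py
# Illustration only: the AsyncCompletions equivalent of the sync sketch above.
import asyncio
import os

from groq import AsyncGroq


async def main() -> None:
    client = AsyncGroq(api_key=os.environ["GROQ_API_KEY"])
    stream = await client.chat.completions.create(
        model="llama3-8b-8192",
        messages=[{"role": "user", "content": "Count to three."}],
        stream=True,  # AsyncStream[ChatCompletionChunk]
    )
    async for chunk in stream:
        print(chunk.choices[0].delta.content or "", end="")


asyncio.run(main())
```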
- - async def create( - self, - *, - messages: Iterable[ChatCompletionMessageParam], - model: Union[str, Literal["gemma-7b-it", "llama3-70b-8192", "llama3-8b-8192", "mixtral-8x7b-32768"]], - frequency_penalty: Optional[float] | NotGiven = NOT_GIVEN, - function_call: Optional[completion_create_params.FunctionCall] | NotGiven = NOT_GIVEN, - functions: Optional[Iterable[completion_create_params.Function]] | NotGiven = NOT_GIVEN, - logit_bias: Optional[Dict[str, int]] | NotGiven = NOT_GIVEN, - logprobs: Optional[bool] | NotGiven = NOT_GIVEN, - max_tokens: Optional[int] | NotGiven = NOT_GIVEN, - n: Optional[int] | NotGiven = NOT_GIVEN, - parallel_tool_calls: Optional[bool] | NotGiven = NOT_GIVEN, - presence_penalty: Optional[float] | NotGiven = NOT_GIVEN, - response_format: Optional[completion_create_params.ResponseFormat] | NotGiven = NOT_GIVEN, - seed: Optional[int] | NotGiven = NOT_GIVEN, - stop: Union[Optional[str], List[str], None] | NotGiven = NOT_GIVEN, - stream: Optional[Literal[False]] | Literal[True] | NotGiven = NOT_GIVEN, - temperature: Optional[float] | NotGiven = NOT_GIVEN, - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] | NotGiven = NOT_GIVEN, - tools: Optional[Iterable[ChatCompletionToolParam]] | NotGiven = NOT_GIVEN, - top_logprobs: Optional[int] | NotGiven = NOT_GIVEN, - top_p: Optional[float] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ChatCompletion | AsyncStream[ChatCompletionChunk]: - """ - Creates a model response for the given chat conversation. - - Args: - messages: A list of messages comprising the conversation so far. - - model: ID of the model to use. For details on which models are compatible with the Chat - API, see available [models](/docs/models) - - frequency_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on their - existing frequency in the text so far, decreasing the model's likelihood to - repeat the same line verbatim. - - function_call: Deprecated in favor of `tool_choice`. - - Controls which (if any) function is called by the model. `none` means the model - will not call a function and instead generates a message. `auto` means the model - can pick between generating a message or calling a function. Specifying a - particular function via `{"name": "my_function"}` forces the model to call that - function. - - `none` is the default when no functions are present. `auto` is the default if - functions are present. - - functions: Deprecated in favor of `tools`. - - A list of functions the model may generate JSON inputs for. - - logit_bias: This is not yet supported by any of our models. Modify the likelihood of - specified tokens appearing in the completion. - - logprobs: This is not yet supported by any of our models. Whether to return log - probabilities of the output tokens or not. If true, returns the log - probabilities of each output token returned in the `content` of `message`. - - max_tokens: The maximum number of tokens that can be generated in the chat completion. The - total length of input tokens and generated tokens is limited by the model's - context length. 
-
-          n: How many chat completion choices to generate for each input message. Note that
-              at the current moment, only n=1 is supported. Other values will result in a 400
-              response.
-
-          parallel_tool_calls: Whether to enable parallel function calling during tool use.
-
-          presence_penalty: Number between -2.0 and 2.0. Positive values penalize new tokens based on
-              whether they appear in the text so far, increasing the model's likelihood to
-              talk about new topics.
-
-          response_format: An object specifying the format that the model must output.
-
-              Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the
-              message the model generates is valid JSON.
-
-              **Important:** when using JSON mode, you **must** also instruct the model to
-              produce JSON yourself via a system or user message.
-
-          seed: If specified, our system will make a best effort to sample deterministically,
-              such that repeated requests with the same `seed` and parameters should return
-              the same result. Determinism is not guaranteed, and you should refer to the
-              `system_fingerprint` response parameter to monitor changes in the backend.
-
-          stop: Up to 4 sequences where the API will stop generating further tokens. The
-              returned text will not contain the stop sequence.
-
-          stream: If set, partial message deltas will be sent. Tokens will be sent as data-only
-              [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format)
-              as they become available, with the stream terminated by a `data: [DONE]`
-              message. [Example code](/docs/text-chat#streaming-a-chat-completion).
-
-          temperature: What sampling temperature to use, between 0 and 2. Higher values like 0.8 will
-              make the output more random, while lower values like 0.2 will make it more
-              focused and deterministic. We generally recommend altering this or top_p but not
-              both.
-
-          tool_choice: Controls which (if any) tool is called by the model. `none` means the model will
-              not call any tool and instead generates a message. `auto` means the model can
-              pick between generating a message or calling one or more tools. `required` means
-              the model must call one or more tools. Specifying a particular tool via
-              `{"type": "function", "function": {"name": "my_function"}}` forces the model to
-              call that tool.
-
-              `none` is the default when no tools are present. `auto` is the default if tools
-              are present.
-
-          tools: A list of tools the model may call. Currently, only functions are supported as a
-              tool. Use this to provide a list of functions the model may generate JSON inputs
-              for. A maximum of 128 functions is supported.
-
-          top_logprobs: This is not yet supported by any of our models. An integer between 0 and 20
-              specifying the number of most likely tokens to return at each token position,
-              each with an associated log probability. `logprobs` must be set to `true` if
-              this parameter is used.
-
-          top_p: An alternative to sampling with temperature, called nucleus sampling, where the
-              model considers the results of the tokens with top_p probability mass. So 0.1
-              means only the tokens comprising the top 10% probability mass are considered. We
-              generally recommend altering this or temperature but not both.
-
-          user: A unique identifier representing your end-user, which can help us monitor and
-              detect abuse.
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._post( - "/openai/v1/chat/completions", - body=await async_maybe_transform( - { - "messages": messages, - "model": model, - "frequency_penalty": frequency_penalty, - "function_call": function_call, - "functions": functions, - "logit_bias": logit_bias, - "logprobs": logprobs, - "max_tokens": max_tokens, - "n": n, - "parallel_tool_calls": parallel_tool_calls, - "presence_penalty": presence_penalty, - "response_format": response_format, - "seed": seed, - "stop": stop, - "stream": stream, - "temperature": temperature, - "tool_choice": tool_choice, - "tools": tools, - "top_logprobs": top_logprobs, - "top_p": top_p, - "user": user, - }, - completion_create_params.CompletionCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=ChatCompletion, - stream=stream or False, - stream_cls=AsyncStream[ChatCompletionChunk], - ) - - -class CompletionsWithRawResponse: - def __init__(self, completions: Completions) -> None: - self._completions = completions - - self.create = to_raw_response_wrapper( - completions.create, - ) - - -class AsyncCompletionsWithRawResponse: - def __init__(self, completions: AsyncCompletions) -> None: - self._completions = completions - - self.create = async_to_raw_response_wrapper( - completions.create, - ) - - -class CompletionsWithStreamingResponse: - def __init__(self, completions: Completions) -> None: - self._completions = completions - - self.create = to_streamed_response_wrapper( - completions.create, - ) - - -class AsyncCompletionsWithStreamingResponse: - def __init__(self, completions: AsyncCompletions) -> None: - self._completions = completions - - self.create = async_to_streamed_response_wrapper( - completions.create, - ) diff --git a/myenv/Lib/site-packages/groq/resources/embeddings.py b/myenv/Lib/site-packages/groq/resources/embeddings.py deleted file mode 100644 index d52fd5e..0000000 --- a/myenv/Lib/site-packages/groq/resources/embeddings.py +++ /dev/null @@ -1,193 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
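The async `create` deleted above mirrors the sync client one-for-one; both post to `/openai/v1/chat/completions`. A minimal usage sketch, assuming the `groq` package is installed from PyPI (`pip install groq`) rather than vendored as it was here, and that `GROQ_API_KEY` is set in the environment; the model literal comes from the signature above:

    import asyncio

    from groq import AsyncGroq


    async def main() -> None:
        client = AsyncGroq()  # picks up GROQ_API_KEY from the environment
        completion = await client.chat.completions.create(
            model="llama3-8b-8192",
            messages=[
                {"role": "system", "content": "You are a terse assistant."},
                {"role": "user", "content": "Name one use of a hash table."},
            ],
            temperature=0.2,  # per the docstring: tune this or top_p, not both
            max_tokens=128,
        )
        print(completion.choices[0].message.content)


    asyncio.run(main())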
- -from __future__ import annotations - -from typing import List, Union, Optional -from typing_extensions import Literal - -import httpx - -from ..types import embedding_create_params -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from .._utils import ( - maybe_transform, - async_maybe_transform, -) -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from .._base_client import make_request_options -from ..types.create_embedding_response import CreateEmbeddingResponse - -__all__ = ["Embeddings", "AsyncEmbeddings"] - - -class Embeddings(SyncAPIResource): - @cached_property - def with_raw_response(self) -> EmbeddingsWithRawResponse: - return EmbeddingsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> EmbeddingsWithStreamingResponse: - return EmbeddingsWithStreamingResponse(self) - - def create( - self, - *, - input: Union[str, List[str]], - model: Union[str, Literal["nomic-embed-text-v1_5"]], - encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> CreateEmbeddingResponse: - """ - Creates an embedding vector representing the input text. - - Args: - input: Input text to embed, encoded as a string or array of tokens. To embed multiple - inputs in a single request, pass an array of strings or array of token arrays. - The input must not exceed the max input tokens for the model, cannot be an empty - string, and any array must be 2048 dimensions or less. - - model: ID of the model to use. - - encoding_format: The format to return the embeddings in. Can only be `float` or `base64`. - - user: A unique identifier representing your end-user, which can help us monitor and - detect abuse. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return self._post( - "/openai/v1/embeddings", - body=maybe_transform( - { - "input": input, - "model": model, - "encoding_format": encoding_format, - "user": user, - }, - embedding_create_params.EmbeddingCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=CreateEmbeddingResponse, - ) - - -class AsyncEmbeddings(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncEmbeddingsWithRawResponse: - return AsyncEmbeddingsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncEmbeddingsWithStreamingResponse: - return AsyncEmbeddingsWithStreamingResponse(self) - - async def create( - self, - *, - input: Union[str, List[str]], - model: Union[str, Literal["nomic-embed-text-v1_5"]], - encoding_format: Literal["float", "base64"] | NotGiven = NOT_GIVEN, - user: Optional[str] | NotGiven = NOT_GIVEN, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> CreateEmbeddingResponse: - """ - Creates an embedding vector representing the input text. - - Args: - input: Input text to embed, encoded as a string or array of tokens. To embed multiple - inputs in a single request, pass an array of strings or array of token arrays. - The input must not exceed the max input tokens for the model, cannot be an empty - string, and any array must be 2048 dimensions or less. - - model: ID of the model to use. - - encoding_format: The format to return the embeddings in. Can only be `float` or `base64`. - - user: A unique identifier representing your end-user, which can help us monitor and - detect abuse. 
- - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - return await self._post( - "/openai/v1/embeddings", - body=await async_maybe_transform( - { - "input": input, - "model": model, - "encoding_format": encoding_format, - "user": user, - }, - embedding_create_params.EmbeddingCreateParams, - ), - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=CreateEmbeddingResponse, - ) - - -class EmbeddingsWithRawResponse: - def __init__(self, embeddings: Embeddings) -> None: - self._embeddings = embeddings - - self.create = to_raw_response_wrapper( - embeddings.create, - ) - - -class AsyncEmbeddingsWithRawResponse: - def __init__(self, embeddings: AsyncEmbeddings) -> None: - self._embeddings = embeddings - - self.create = async_to_raw_response_wrapper( - embeddings.create, - ) - - -class EmbeddingsWithStreamingResponse: - def __init__(self, embeddings: Embeddings) -> None: - self._embeddings = embeddings - - self.create = to_streamed_response_wrapper( - embeddings.create, - ) - - -class AsyncEmbeddingsWithStreamingResponse: - def __init__(self, embeddings: AsyncEmbeddings) -> None: - self._embeddings = embeddings - - self.create = async_to_streamed_response_wrapper( - embeddings.create, - ) diff --git a/myenv/Lib/site-packages/groq/resources/models.py b/myenv/Lib/site-packages/groq/resources/models.py deleted file mode 100644 index e81a21c..0000000 --- a/myenv/Lib/site-packages/groq/resources/models.py +++ /dev/null @@ -1,271 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -import httpx - -from .._types import NOT_GIVEN, Body, Query, Headers, NotGiven -from .._compat import cached_property -from .._resource import SyncAPIResource, AsyncAPIResource -from .._response import ( - to_raw_response_wrapper, - to_streamed_response_wrapper, - async_to_raw_response_wrapper, - async_to_streamed_response_wrapper, -) -from ..types.model import Model -from .._base_client import make_request_options -from ..types.model_deleted import ModelDeleted -from ..types.model_list_response import ModelListResponse - -__all__ = ["Models", "AsyncModels"] - - -class Models(SyncAPIResource): - @cached_property - def with_raw_response(self) -> ModelsWithRawResponse: - return ModelsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> ModelsWithStreamingResponse: - return ModelsWithStreamingResponse(self) - - def retrieve( - self, - model: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. 
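Both embeddings variants deleted above post to `/openai/v1/embeddings`. A sync sketch under the same assumptions (PyPI `groq`, `GROQ_API_KEY` exported); the model literal is the one named in the signature:

    from groq import Groq

    client = Groq()
    resp = client.embeddings.create(
        model="nomic-embed-text-v1_5",
        input="The quick brown fox",  # or a list of strings for a batch
        encoding_format="float",      # "float" (list of floats) or "base64" (str)
    )
    vector = resp.data[0].embedding   # List[float] here; str if base64 was chosen
    print(len(vector), resp.usage.total_tokens)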
- extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Model: - """ - Get a specific model - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not model: - raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") - return self._get( - f"/openai/v1/models/{model}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=Model, - ) - - def list( - self, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ModelListResponse: - """get all available models""" - return self._get( - "/openai/v1/models", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=ModelListResponse, - ) - - def delete( - self, - model: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ModelDeleted: - """ - Delete a model - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not model: - raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") - return self._delete( - f"/openai/v1/models/{model}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=ModelDeleted, - ) - - -class AsyncModels(AsyncAPIResource): - @cached_property - def with_raw_response(self) -> AsyncModelsWithRawResponse: - return AsyncModelsWithRawResponse(self) - - @cached_property - def with_streaming_response(self) -> AsyncModelsWithStreamingResponse: - return AsyncModelsWithStreamingResponse(self) - - async def retrieve( - self, - model: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. 
- extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> Model: - """ - Get a specific model - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not model: - raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") - return await self._get( - f"/openai/v1/models/{model}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=Model, - ) - - async def list( - self, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ModelListResponse: - """get all available models""" - return await self._get( - "/openai/v1/models", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=ModelListResponse, - ) - - async def delete( - self, - model: str, - *, - # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs. - # The extra values given here take precedence over values defined on the client or passed to this method. - extra_headers: Headers | None = None, - extra_query: Query | None = None, - extra_body: Body | None = None, - timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN, - ) -> ModelDeleted: - """ - Delete a model - - Args: - extra_headers: Send extra headers - - extra_query: Add additional query parameters to the request - - extra_body: Add additional JSON properties to the request - - timeout: Override the client-level default timeout for this request, in seconds - """ - if not model: - raise ValueError(f"Expected a non-empty value for `model` but received {model!r}") - return await self._delete( - f"/openai/v1/models/{model}", - options=make_request_options( - extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout - ), - cast_to=ModelDeleted, - ) - - -class ModelsWithRawResponse: - def __init__(self, models: Models) -> None: - self._models = models - - self.retrieve = to_raw_response_wrapper( - models.retrieve, - ) - self.list = to_raw_response_wrapper( - models.list, - ) - self.delete = to_raw_response_wrapper( - models.delete, - ) - - -class AsyncModelsWithRawResponse: - def __init__(self, models: AsyncModels) -> None: - self._models = models - - self.retrieve = async_to_raw_response_wrapper( - models.retrieve, - ) - self.list = async_to_raw_response_wrapper( - models.list, - ) - self.delete = async_to_raw_response_wrapper( - models.delete, - ) - - -class ModelsWithStreamingResponse: - def __init__(self, models: Models) -> None: - self._models = models - - self.retrieve = to_streamed_response_wrapper( - models.retrieve, - ) - self.list = to_streamed_response_wrapper( - models.list, - ) - self.delete = to_streamed_response_wrapper( - models.delete, - ) - - -class AsyncModelsWithStreamingResponse: - def 
__init__(self, models: AsyncModels) -> None: - self._models = models - - self.retrieve = async_to_streamed_response_wrapper( - models.retrieve, - ) - self.list = async_to_streamed_response_wrapper( - models.list, - ) - self.delete = async_to_streamed_response_wrapper( - models.delete, - ) diff --git a/myenv/Lib/site-packages/groq/types/__init__.py b/myenv/Lib/site-packages/groq/types/__init__.py deleted file mode 100644 index bb0aada..0000000 --- a/myenv/Lib/site-packages/groq/types/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from .model import Model as Model -from .shared import ( - ErrorObject as ErrorObject, - FunctionDefinition as FunctionDefinition, - FunctionParameters as FunctionParameters, -) -from .embedding import Embedding as Embedding -from .model_deleted import ModelDeleted as ModelDeleted -from .completion_usage import CompletionUsage as CompletionUsage -from .model_list_response import ModelListResponse as ModelListResponse -from .embedding_create_params import EmbeddingCreateParams as EmbeddingCreateParams -from .create_embedding_response import CreateEmbeddingResponse as CreateEmbeddingResponse diff --git a/myenv/Lib/site-packages/groq/types/audio/__init__.py b/myenv/Lib/site-packages/groq/types/audio/__init__.py deleted file mode 100644 index a0f598b..0000000 --- a/myenv/Lib/site-packages/groq/types/audio/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from .translation import Translation as Translation -from .transcription import Transcription as Transcription -from .translation_create_params import TranslationCreateParams as TranslationCreateParams -from .transcription_create_params import TranscriptionCreateParams as TranscriptionCreateParams diff --git a/myenv/Lib/site-packages/groq/types/audio/transcription.py b/myenv/Lib/site-packages/groq/types/audio/transcription.py deleted file mode 100644 index 0b6ab39..0000000 --- a/myenv/Lib/site-packages/groq/types/audio/transcription.py +++ /dev/null @@ -1,12 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - - - -from ..._models import BaseModel - -__all__ = ["Transcription"] - - -class Transcription(BaseModel): - text: str - """The transcribed text.""" diff --git a/myenv/Lib/site-packages/groq/types/audio/transcription_create_params.py b/myenv/Lib/site-packages/groq/types/audio/transcription_create_params.py deleted file mode 100644 index 9c09a17..0000000 --- a/myenv/Lib/site-packages/groq/types/audio/transcription_create_params.py +++ /dev/null @@ -1,164 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List, Union -from typing_extensions import Literal, Required, TypedDict - -from ..._types import FileTypes - -__all__ = ["TranscriptionCreateParams"] - - -class TranscriptionCreateParams(TypedDict, total=False): - file: Required[FileTypes] - """ - The audio file object (not file name) to transcribe, in one of these formats: - flac, mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - """ - - model: Required[Union[str, Literal["whisper-large-v3"]]] - """ID of the model to use. 
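The models resource deleted above exposes retrieve, list, and delete, and raises a client-side `ValueError` on an empty model id. A sketch in which `ModelListResponse` is assumed to carry a `data` list of `Model` objects, `Model` is assumed to expose an `id`, and the id passed to `delete` is purely illustrative:

    from groq import Groq

    client = Groq()

    listing = client.models.list()        # GET /openai/v1/models
    for m in listing.data:                # `data` is an assumed field; see lead-in
        print(m.id)

    model = client.models.retrieve("llama3-8b-8192")  # GET .../models/{model}
    print(model.id)

    # DELETE .../models/{model}; "my-fine-tune" is a hypothetical id, and
    # ModelDeleted is assumed to expose a boolean `deleted` flag.
    deleted = client.models.delete("my-fine-tune")
    print(deleted.deleted)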
Only `whisper-large-v3` is currently available.""" - - language: Union[ - str, - Literal[ - "en", - "zh", - "de", - "es", - "ru", - "ko", - "fr", - "ja", - "pt", - "tr", - "pl", - "ca", - "nl", - "ar", - "sv", - "it", - "id", - "hi", - "fi", - "vi", - "he", - "uk", - "el", - "ms", - "cs", - "ro", - "da", - "hu", - "ta", - "th", - "ur", - "hr", - "bg", - "lt", - "la", - "mi", - "ml", - "cy", - "sk", - "te", - "fa", - "lv", - "bn", - "sr", - "az", - "sl", - "kn", - "et", - "mk", - "br", - "eu", - "is", - "hy", - "ne", - "mn", - "bs", - "kk", - "sq", - "sw", - "gl", - "mr", - "pa", - "si", - "km", - "sn", - "yo", - "so", - "af", - "oc", - "ka", - "be", - "tg", - "sd", - "gu", - "am", - "yi", - "lo", - "uz", - "fo", - "ht", - "ps", - "tk", - "nn", - "mt", - "sa", - "lb", - "my", - "bo", - "tl", - "mg", - "as", - "tt", - "haw", - "ln", - "ha", - "ba", - "jv", - "su", - "yue", - ], - ] - """The language of the input audio. - - Supplying the input language in - [ISO-639-1](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes) format will - improve accuracy and latency. - """ - - prompt: str - """An optional text to guide the model's style or continue a previous audio - segment. - - The [prompt](/docs/guides/speech-to-text/prompting) should match the audio - language. - """ - - response_format: Literal["json", "text", "verbose_json"] - """ - The format of the transcript output, in one of these options: `json`, `text`, or - `verbose_json`. - """ - - temperature: float - """The sampling temperature, between 0 and 1. - - Higher values like 0.8 will make the output more random, while lower values like - 0.2 will make it more focused and deterministic. If set to 0, the model will use - [log probability](https://en.wikipedia.org/wiki/Log_probability) to - automatically increase the temperature until certain thresholds are hit. - """ - - timestamp_granularities: List[Literal["word", "segment"]] - """The timestamp granularities to populate for this transcription. - - `response_format` must be set `verbose_json` to use timestamp granularities. - Either or both of these options are supported: `word`, or `segment`. Note: There - is no additional latency for segment timestamps, but generating word timestamps - incurs additional latency. - """ diff --git a/myenv/Lib/site-packages/groq/types/audio/translation.py b/myenv/Lib/site-packages/groq/types/audio/translation.py deleted file mode 100644 index 3d9ede2..0000000 --- a/myenv/Lib/site-packages/groq/types/audio/translation.py +++ /dev/null @@ -1,11 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - - - -from ..._models import BaseModel - -__all__ = ["Translation"] - - -class Translation(BaseModel): - text: str diff --git a/myenv/Lib/site-packages/groq/types/audio/translation_create_params.py b/myenv/Lib/site-packages/groq/types/audio/translation_create_params.py deleted file mode 100644 index 6e2d4b4..0000000 --- a/myenv/Lib/site-packages/groq/types/audio/translation_create_params.py +++ /dev/null @@ -1,43 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -from typing import Union -from typing_extensions import Literal, Required, TypedDict - -from ..._types import FileTypes - -__all__ = ["TranslationCreateParams"] - - -class TranslationCreateParams(TypedDict, total=False): - file: Required[FileTypes] - """ - The audio file object (not file name) translate, in one of these formats: flac, - mp3, mp4, mpeg, mpga, m4a, ogg, wav, or webm. - """ - - model: Required[Union[str, Literal["whisper-large-v3"]]] - """ID of the model to use. Only `whisper-large-v3` is currently available.""" - - prompt: str - """An optional text to guide the model's style or continue a previous audio - segment. - - The [prompt](/docs/guides/speech-to-text/prompting) should be in English. - """ - - response_format: Literal["json", "text", "verbose_json"] - """ - The format of the transcript output, in one of these options: `json`, `text`, or - `verbose_json`. - """ - - temperature: float - """The sampling temperature, between 0 and 1. - - Higher values like 0.8 will make the output more random, while lower values like - 0.2 will make it more focused and deterministic. If set to 0, the model will use - [log probability](https://en.wikipedia.org/wiki/Log_probability) to - automatically increase the temperature until certain thresholds are hit. - """ diff --git a/myenv/Lib/site-packages/groq/types/chat/__init__.py b/myenv/Lib/site-packages/groq/types/chat/__init__.py deleted file mode 100644 index 5d122d2..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/__init__.py +++ /dev/null @@ -1,41 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from .chat_completion import ChatCompletion as ChatCompletion -from .chat_completion_role import ChatCompletionRole as ChatCompletionRole -from .chat_completion_chunk import ChatCompletionChunk as ChatCompletionChunk -from .chat_completion_message import ChatCompletionMessage as ChatCompletionMessage -from .completion_create_params import CompletionCreateParams as CompletionCreateParams -from .chat_completion_tool_param import ChatCompletionToolParam as ChatCompletionToolParam -from .chat_completion_message_param import ChatCompletionMessageParam as ChatCompletionMessageParam -from .chat_completion_token_logprob import ChatCompletionTokenLogprob as ChatCompletionTokenLogprob -from .chat_completion_message_tool_call import ChatCompletionMessageToolCall as ChatCompletionMessageToolCall -from .chat_completion_content_part_param import ChatCompletionContentPartParam as ChatCompletionContentPartParam -from .chat_completion_tool_message_param import ChatCompletionToolMessageParam as ChatCompletionToolMessageParam -from .chat_completion_user_message_param import ChatCompletionUserMessageParam as ChatCompletionUserMessageParam -from .chat_completion_system_message_param import ChatCompletionSystemMessageParam as ChatCompletionSystemMessageParam -from .chat_completion_function_message_param import ( - ChatCompletionFunctionMessageParam as ChatCompletionFunctionMessageParam, -) -from .chat_completion_assistant_message_param import ( - ChatCompletionAssistantMessageParam as ChatCompletionAssistantMessageParam, -) -from .chat_completion_content_part_text_param import ( - ChatCompletionContentPartTextParam as ChatCompletionContentPartTextParam, -) -from .chat_completion_message_tool_call_param import ( - ChatCompletionMessageToolCallParam as ChatCompletionMessageToolCallParam, -) -from .chat_completion_named_tool_choice_param 
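The two TypedDicts above describe the `whisper-large-v3` transcription and translation payloads. A sketch assuming the matching `client.audio.transcriptions` / `client.audio.translations` resources (their modules are deleted elsewhere in this diff) and a hypothetical local `sample.m4a` recording:

    from groq import Groq

    client = Groq()

    with open("sample.m4a", "rb") as audio:  # hypothetical local file
        transcript = client.audio.transcriptions.create(
            file=audio,
            model="whisper-large-v3",             # the only model literal offered
            language="en",                        # ISO-639-1; improves accuracy and latency
            response_format="verbose_json",       # required for timestamp granularities
            timestamp_granularities=["segment"],  # "word" adds latency, "segment" does not
            temperature=0.0,
        )
    print(transcript.text)

    with open("sample.m4a", "rb") as audio:
        translation = client.audio.translations.create(  # output is English
            file=audio,
            model="whisper-large-v3",
        )
    print(translation.text)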
import ( - ChatCompletionNamedToolChoiceParam as ChatCompletionNamedToolChoiceParam, -) -from .chat_completion_content_part_image_param import ( - ChatCompletionContentPartImageParam as ChatCompletionContentPartImageParam, -) -from .chat_completion_tool_choice_option_param import ( - ChatCompletionToolChoiceOptionParam as ChatCompletionToolChoiceOptionParam, -) -from .chat_completion_function_call_option_param import ( - ChatCompletionFunctionCallOptionParam as ChatCompletionFunctionCallOptionParam, -) diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion.py deleted file mode 100644 index 4f8b04b..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion.py +++ /dev/null @@ -1,66 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List, Optional -from typing_extensions import Literal - -from ..._models import BaseModel -from ..completion_usage import CompletionUsage -from .chat_completion_message import ChatCompletionMessage -from .chat_completion_token_logprob import ChatCompletionTokenLogprob - -__all__ = ["ChatCompletion", "Choice", "ChoiceLogprobs"] - - -class ChoiceLogprobs(BaseModel): - content: Optional[List[ChatCompletionTokenLogprob]] = None - """A list of message content tokens with log probability information.""" - - -class Choice(BaseModel): - finish_reason: Literal["stop", "length", "tool_calls", "function_call"] - """The reason the model stopped generating tokens. - - This will be `stop` if the model hit a natural stop point or a provided stop - sequence, `length` if the maximum number of tokens specified in the request was - reached, `tool_calls` if the model called a tool, or `function_call` - (deprecated) if the model called a function. - """ - - index: int - """The index of the choice in the list of choices.""" - - logprobs: Optional[ChoiceLogprobs] = None - """Log probability information for the choice.""" - - message: ChatCompletionMessage - """A chat completion message generated by the model.""" - - -class ChatCompletion(BaseModel): - id: str - """A unique identifier for the chat completion.""" - - choices: List[Choice] - """A list of chat completion choices. - - Can be more than one if `n` is greater than 1. - """ - - created: int - """The Unix timestamp (in seconds) of when the chat completion was created.""" - - model: str - """The model used for the chat completion.""" - - object: Literal["chat.completion"] - """The object type, which is always `chat.completion`.""" - - system_fingerprint: Optional[str] = None - """This fingerprint represents the backend configuration that the model runs with. - - Can be used in conjunction with the `seed` request parameter to understand when - backend changes have been made that might impact determinism. - """ - - usage: Optional[CompletionUsage] = None - """Usage statistics for the completion request.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_assistant_message_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_assistant_message_param.py deleted file mode 100644 index 1aecfd1..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_assistant_message_param.py +++ /dev/null @@ -1,51 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
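`ChatCompletion` above is what the non-streaming `create` calls earlier in this diff return. A small helper sketch showing the fields a caller usually inspects:

    def summarize(completion) -> None:
        """Inspect a ChatCompletion (shape per the model deleted above)."""
        choice = completion.choices[0]
        if choice.finish_reason == "length":
            print("warning: reply truncated by max_tokens / context length")
        elif choice.finish_reason == "tool_calls":
            print("model requested a tool call")
        print("model:", completion.model)
        print("fingerprint:", completion.system_fingerprint)  # pair with `seed`
        if completion.usage is not None:
            print("tokens:", completion.usage.total_tokens)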
- -from __future__ import annotations - -from typing import Iterable, Optional -from typing_extensions import Literal, Required, TypedDict - -from .chat_completion_message_tool_call_param import ChatCompletionMessageToolCallParam - -__all__ = ["ChatCompletionAssistantMessageParam", "FunctionCall"] - - -class FunctionCall(TypedDict, total=False): - arguments: str - """ - The arguments to call the function with, as generated by the model in JSON - format. Note that the model does not always generate valid JSON, and may - hallucinate parameters not defined by your function schema. Validate the - arguments in your code before calling your function. - """ - - name: str - """The name of the function to call.""" - - -class ChatCompletionAssistantMessageParam(TypedDict, total=False): - role: Required[Literal["assistant"]] - """The role of the messages author, in this case `assistant`.""" - - content: Optional[str] - """The contents of the assistant message. - - Required unless `tool_calls` or `function_call` is specified. - """ - - function_call: FunctionCall - """Deprecated and replaced by `tool_calls`. - - The name and arguments of a function that should be called, as generated by the - model. - """ - - name: str - """An optional name for the participant. - - Provides the model information to differentiate between participants of the same - role. - """ - - tool_calls: Iterable[ChatCompletionMessageToolCallParam] - """The tool calls generated by the model, such as function calls.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_chunk.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_chunk.py deleted file mode 100644 index 06d2a86..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_chunk.py +++ /dev/null @@ -1,158 +0,0 @@ -# File Manually added to support streaming -# File is in libs instead of models to avoid conflicts with stainless bot - -from typing import List, Optional -from typing_extensions import Literal - -from ..._models import BaseModel -from ..completion_usage import CompletionUsage -from .chat_completion_token_logprob import ChatCompletionTokenLogprob - -__all__ = [ - "ChatCompletionChunk", - "Choice", - "ChoiceDelta", - "ChoiceDeltaFunctionCall", - "ChoiceDeltaToolCall", - "ChoiceDeltaToolCallFunction", - "ChoiceLogprobs", -] - - -class ChoiceDeltaFunctionCall(BaseModel): - arguments: Optional[str] = None - """ - The arguments to call the function with, as generated by the model in JSON - format. Note that the model does not always generate valid JSON, and may - hallucinate parameters not defined by your function schema. Validate the - arguments in your code before calling your function. - """ - - name: Optional[str] = None - """The name of the function to call.""" - - -class ChoiceDeltaToolCallFunction(BaseModel): - arguments: Optional[str] = None - """ - The arguments to call the function with, as generated by the model in JSON - format. Note that the model does not always generate valid JSON, and may - hallucinate parameters not defined by your function schema. Validate the - arguments in your code before calling your function. - """ - - name: Optional[str] = None - """The name of the function to call.""" - - -class ChoiceDeltaToolCall(BaseModel): - index: int - - id: Optional[str] = None - """The ID of the tool call.""" - - function: Optional[ChoiceDeltaToolCallFunction] = None - - type: Optional[Literal["function"]] = None - """The type of the tool. 
Currently, only `function` is supported.""" - - -class ChoiceDelta(BaseModel): - content: Optional[str] = None - """The contents of the chunk message.""" - - function_call: Optional[ChoiceDeltaFunctionCall] = None - """Deprecated and replaced by `tool_calls`. - - The name and arguments of a function that should be called, as generated by the - model. - """ - - role: Optional[Literal["system", "user", "assistant", "tool"]] = None - """The role of the author of this message.""" - - tool_calls: Optional[List[ChoiceDeltaToolCall]] = None - - -class ChoiceLogprobs(BaseModel): - content: Optional[List[ChatCompletionTokenLogprob]] = None - """A list of message content tokens with log probability information.""" - - -class Choice(BaseModel): - delta: ChoiceDelta - """A chat completion delta generated by streamed model responses.""" - - finish_reason: Optional[Literal["stop", "length", "tool_calls", "content_filter", "function_call"]] = None - """The reason the model stopped generating tokens. - - This will be `stop` if the model hit a natural stop point or a provided stop - sequence, `length` if the maximum number of tokens specified in the request was - reached, `content_filter` if content was omitted due to a flag from our content - filters, `tool_calls` if the model called a tool, or `function_call` - (deprecated) if the model called a function. - """ - - index: int - """The index of the choice in the list of choices.""" - - logprobs: Optional[ChoiceLogprobs] = None - """Log probability information for the choice.""" - - -class XGroq(BaseModel): - id: Optional[str] - """ - A groq request ID which can be used by to refer to a specific request to groq support - Only sent with the first chunk - """ - - usage: Optional[CompletionUsage] - """Usage information for the stream. Only sent in the final chunk""" - - error: Optional[str] - """ An error string indicating why a stream was stopped early""" - - -class ChatCompletionChunk(BaseModel): - id: str - """A unique identifier for the chat completion. Each chunk has the same ID.""" - - choices: List[Choice] - """A list of chat completion choices. - - Can contain more than one elements if `n` is greater than 1. Can also be empty - for the last chunk if you set `stream_options: {"include_usage": true}`. - """ - - created: int - """The Unix timestamp (in seconds) of when the chat completion was created. - - Each chunk has the same timestamp. - """ - - model: str - """The model to generate the completion.""" - - object: Literal["chat.completion.chunk"] - """The object type, which is always `chat.completion.chunk`.""" - - system_fingerprint: Optional[str] = None - """ - This fingerprint represents the backend configuration that the model runs with. - Can be used in conjunction with the `seed` request parameter to understand when - backend changes have been made that might impact determinism. - """ - - usage: Optional[CompletionUsage] = None - """ - An optional field that will only be present when you set - `stream_options: {"include_usage": true}` in your request. When present, it - contains a null value except for the last chunk which contains the token usage - statistics for the entire request. - """ - - x_groq: Optional[XGroq] - """ - Additional metadata provided by groq. 
- """ diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_image_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_image_param.py deleted file mode 100644 index c1e0841..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_image_param.py +++ /dev/null @@ -1,22 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionContentPartImageParam", "ImageURL"] - - -class ImageURL(TypedDict, total=False): - url: Required[str] - """Either a URL of the image or the base64 encoded image data.""" - - detail: Literal["auto", "low", "high"] - """Specifies the detail level of the image.""" - - -class ChatCompletionContentPartImageParam(TypedDict, total=False): - image_url: Required[ImageURL] - - type: Required[Literal["image_url"]] - """The type of the content part.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_param.py deleted file mode 100644 index e0c6e48..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_param.py +++ /dev/null @@ -1,15 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Union -from typing_extensions import TypeAlias - -from .chat_completion_content_part_text_param import ChatCompletionContentPartTextParam -from .chat_completion_content_part_image_param import ChatCompletionContentPartImageParam - -__all__ = ["ChatCompletionContentPartParam"] - -ChatCompletionContentPartParam: TypeAlias = Union[ - ChatCompletionContentPartTextParam, ChatCompletionContentPartImageParam -] diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_text_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_text_param.py deleted file mode 100644 index a270744..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_content_part_text_param.py +++ /dev/null @@ -1,15 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionContentPartTextParam"] - - -class ChatCompletionContentPartTextParam(TypedDict, total=False): - text: Required[str] - """The text content.""" - - type: Required[Literal["text"]] - """The type of the content part.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_function_call_option_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_function_call_option_param.py deleted file mode 100644 index 2bc014a..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_function_call_option_param.py +++ /dev/null @@ -1,12 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
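`ChatCompletionChunk` above, with its Groq-specific `x_groq` envelope, is the shape yielded when `stream=True`. A consumption sketch under the same PyPI and env-var assumptions as the earlier examples:

    from groq import Groq

    client = Groq()
    stream = client.chat.completions.create(
        model="llama3-8b-8192",
        messages=[{"role": "user", "content": "Count to five."}],
        stream=True,  # data-only server-sent events, terminated by `data: [DONE]`
    )
    for chunk in stream:
        if chunk.choices:  # choices can be empty on a usage-only final chunk
            delta = chunk.choices[0].delta
            if delta.content:
                print(delta.content, end="", flush=True)
        if chunk.x_groq is not None and chunk.x_groq.usage is not None:
            # per the model above, usage arrives only in the final chunk
            print("\ntotal tokens:", chunk.x_groq.usage.total_tokens)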
- -from __future__ import annotations - -from typing_extensions import Required, TypedDict - -__all__ = ["ChatCompletionFunctionCallOptionParam"] - - -class ChatCompletionFunctionCallOptionParam(TypedDict, total=False): - name: Required[str] - """The name of the function to call.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_function_message_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_function_message_param.py deleted file mode 100644 index 5af12bf..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_function_message_param.py +++ /dev/null @@ -1,19 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Optional -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionFunctionMessageParam"] - - -class ChatCompletionFunctionMessageParam(TypedDict, total=False): - content: Required[Optional[str]] - """The contents of the function message.""" - - name: Required[str] - """The name of the function to call.""" - - role: Required[Literal["function"]] - """The role of the messages author, in this case `function`.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_message.py deleted file mode 100644 index 8db7d17..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message.py +++ /dev/null @@ -1,40 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List, Optional -from typing_extensions import Literal - -from ..._models import BaseModel -from .chat_completion_message_tool_call import ChatCompletionMessageToolCall - -__all__ = ["ChatCompletionMessage", "FunctionCall"] - - -class FunctionCall(BaseModel): - arguments: str - """ - The arguments to call the function with, as generated by the model in JSON - format. Note that the model does not always generate valid JSON, and may - hallucinate parameters not defined by your function schema. Validate the - arguments in your code before calling your function. - """ - - name: str - """The name of the function to call.""" - - -class ChatCompletionMessage(BaseModel): - content: Optional[str] = None - """The contents of the message.""" - - role: Literal["assistant"] - """The role of the author of this message.""" - - function_call: Optional[FunctionCall] = None - """Deprecated and replaced by `tool_calls`. - - The name and arguments of a function that should be called, as generated by the - model. - """ - - tool_calls: Optional[List[ChatCompletionMessageToolCall]] = None - """The tool calls generated by the model, such as function calls.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_param.py deleted file mode 100644 index ec65d94..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_param.py +++ /dev/null @@ -1,22 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -from typing import Union -from typing_extensions import TypeAlias - -from .chat_completion_tool_message_param import ChatCompletionToolMessageParam -from .chat_completion_user_message_param import ChatCompletionUserMessageParam -from .chat_completion_system_message_param import ChatCompletionSystemMessageParam -from .chat_completion_function_message_param import ChatCompletionFunctionMessageParam -from .chat_completion_assistant_message_param import ChatCompletionAssistantMessageParam - -__all__ = ["ChatCompletionMessageParam"] - -ChatCompletionMessageParam: TypeAlias = Union[ - ChatCompletionSystemMessageParam, - ChatCompletionUserMessageParam, - ChatCompletionAssistantMessageParam, - ChatCompletionToolMessageParam, - ChatCompletionFunctionMessageParam, -] diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_tool_call.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_tool_call.py deleted file mode 100644 index 4fec667..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_tool_call.py +++ /dev/null @@ -1,31 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing_extensions import Literal - -from ..._models import BaseModel - -__all__ = ["ChatCompletionMessageToolCall", "Function"] - - -class Function(BaseModel): - arguments: str - """ - The arguments to call the function with, as generated by the model in JSON - format. Note that the model does not always generate valid JSON, and may - hallucinate parameters not defined by your function schema. Validate the - arguments in your code before calling your function. - """ - - name: str - """The name of the function to call.""" - - -class ChatCompletionMessageToolCall(BaseModel): - id: str - """The ID of the tool call.""" - - function: Function - """The function that the model called.""" - - type: Literal["function"] - """The type of the tool. Currently, only `function` is supported.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_tool_call_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_tool_call_param.py deleted file mode 100644 index f616c36..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_message_tool_call_param.py +++ /dev/null @@ -1,31 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionMessageToolCallParam", "Function"] - - -class Function(TypedDict, total=False): - arguments: Required[str] - """ - The arguments to call the function with, as generated by the model in JSON - format. Note that the model does not always generate valid JSON, and may - hallucinate parameters not defined by your function schema. Validate the - arguments in your code before calling your function. - """ - - name: Required[str] - """The name of the function to call.""" - - -class ChatCompletionMessageToolCallParam(TypedDict, total=False): - id: Required[str] - """The ID of the tool call.""" - - function: Required[Function] - """The function that the model called.""" - - type: Required[Literal["function"]] - """The type of the tool. 
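The message-param union and tool-call shapes above define how a tool round trip is serialized: the assistant turn carries `tool_calls`, and the reply to it is a `role: "tool"` message keyed by `tool_call_id`. A sketch, where `run_tool` is a hypothetical local dispatcher supplied by the caller:

    import json


    def answer_tool_call(client, completion, messages, run_tool):
        """Feed one tool result back to the model (a sketch, not the SDK's API)."""
        call = completion.choices[0].message.tool_calls[0]
        # The docstrings above warn the arguments JSON may be invalid: validate it.
        args = json.loads(call.function.arguments)
        result = run_tool(call.function.name, args)  # hypothetical helper
        messages.append({
            "role": "assistant",  # ChatCompletionAssistantMessageParam
            "content": completion.choices[0].message.content,
            "tool_calls": [{
                "id": call.id,
                "type": "function",
                "function": {"name": call.function.name,
                             "arguments": call.function.arguments},
            }],
        })
        messages.append({
            "role": "tool",       # ChatCompletionToolMessageParam
            "tool_call_id": call.id,
            "content": json.dumps(result),
        })
        return client.chat.completions.create(model="llama3-8b-8192",
                                              messages=messages)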
Currently, only `function` is supported.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_named_tool_choice_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_named_tool_choice_param.py deleted file mode 100644 index 369f8b4..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_named_tool_choice_param.py +++ /dev/null @@ -1,19 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionNamedToolChoiceParam", "Function"] - - -class Function(TypedDict, total=False): - name: Required[str] - """The name of the function to call.""" - - -class ChatCompletionNamedToolChoiceParam(TypedDict, total=False): - function: Required[Function] - - type: Required[Literal["function"]] - """The type of the tool. Currently, only `function` is supported.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_role.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_role.py deleted file mode 100644 index c2ebef7..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_role.py +++ /dev/null @@ -1,7 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing_extensions import Literal, TypeAlias - -__all__ = ["ChatCompletionRole"] - -ChatCompletionRole: TypeAlias = Literal["system", "user", "assistant", "tool", "function"] diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_system_message_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_system_message_param.py deleted file mode 100644 index 94bb3f6..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_system_message_param.py +++ /dev/null @@ -1,22 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionSystemMessageParam"] - - -class ChatCompletionSystemMessageParam(TypedDict, total=False): - content: Required[str] - """The contents of the system message.""" - - role: Required[Literal["system"]] - """The role of the messages author, in this case `system`.""" - - name: str - """An optional name for the participant. - - Provides the model information to differentiate between participants of the same - role. - """ diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_token_logprob.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_token_logprob.py deleted file mode 100644 index c69e258..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_token_logprob.py +++ /dev/null @@ -1,57 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List, Optional - -from ..._models import BaseModel - -__all__ = ["ChatCompletionTokenLogprob", "TopLogprob"] - - -class TopLogprob(BaseModel): - token: str - """The token.""" - - bytes: Optional[List[int]] = None - """A list of integers representing the UTF-8 bytes representation of the token. - - Useful in instances where characters are represented by multiple tokens and - their byte representations must be combined to generate the correct text - representation. Can be `null` if there is no bytes representation for the token. 
- """ - - logprob: float - """The log probability of this token, if it is within the top 20 most likely - tokens. - - Otherwise, the value `-9999.0` is used to signify that the token is very - unlikely. - """ - - -class ChatCompletionTokenLogprob(BaseModel): - token: str - """The token.""" - - bytes: Optional[List[int]] = None - """A list of integers representing the UTF-8 bytes representation of the token. - - Useful in instances where characters are represented by multiple tokens and - their byte representations must be combined to generate the correct text - representation. Can be `null` if there is no bytes representation for the token. - """ - - logprob: float - """The log probability of this token, if it is within the top 20 most likely - tokens. - - Otherwise, the value `-9999.0` is used to signify that the token is very - unlikely. - """ - - top_logprobs: List[TopLogprob] - """List of the most likely tokens and their log probability, at this token - position. - - In rare cases, there may be fewer than the number of requested `top_logprobs` - returned. - """ diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_choice_option_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_choice_option_param.py deleted file mode 100644 index 7dedf04..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_choice_option_param.py +++ /dev/null @@ -1,14 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Union -from typing_extensions import Literal, TypeAlias - -from .chat_completion_named_tool_choice_param import ChatCompletionNamedToolChoiceParam - -__all__ = ["ChatCompletionToolChoiceOptionParam"] - -ChatCompletionToolChoiceOptionParam: TypeAlias = Union[ - Literal["none", "auto", "required"], ChatCompletionNamedToolChoiceParam -] diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_message_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_message_param.py deleted file mode 100644 index 5c590e0..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_message_param.py +++ /dev/null @@ -1,18 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["ChatCompletionToolMessageParam"] - - -class ChatCompletionToolMessageParam(TypedDict, total=False): - content: Required[str] - """The contents of the tool message.""" - - role: Required[Literal["tool"]] - """The role of the messages author, in this case `tool`.""" - - tool_call_id: Required[str] - """Tool call that this message is responding to.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_param.py deleted file mode 100644 index 6c2b1a3..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_tool_param.py +++ /dev/null @@ -1,16 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from __future__ import annotations - -from typing_extensions import Literal, Required, TypedDict - -from ..shared_params.function_definition import FunctionDefinition - -__all__ = ["ChatCompletionToolParam"] - - -class ChatCompletionToolParam(TypedDict, total=False): - function: Required[FunctionDefinition] - - type: Required[Literal["function"]] - """The type of the tool. Currently, only `function` is supported.""" diff --git a/myenv/Lib/site-packages/groq/types/chat/chat_completion_user_message_param.py b/myenv/Lib/site-packages/groq/types/chat/chat_completion_user_message_param.py deleted file mode 100644 index 5c15322..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/chat_completion_user_message_param.py +++ /dev/null @@ -1,25 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Union, Iterable -from typing_extensions import Literal, Required, TypedDict - -from .chat_completion_content_part_param import ChatCompletionContentPartParam - -__all__ = ["ChatCompletionUserMessageParam"] - - -class ChatCompletionUserMessageParam(TypedDict, total=False): - content: Required[Union[str, Iterable[ChatCompletionContentPartParam]]] - """The contents of the user message.""" - - role: Required[Literal["user"]] - """The role of the messages author, in this case `user`.""" - - name: str - """An optional name for the participant. - - Provides the model information to differentiate between participants of the same - role. - """ diff --git a/myenv/Lib/site-packages/groq/types/chat/completion_create_params.py b/myenv/Lib/site-packages/groq/types/chat/completion_create_params.py deleted file mode 100644 index 811c2d1..0000000 --- a/myenv/Lib/site-packages/groq/types/chat/completion_create_params.py +++ /dev/null @@ -1,206 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Dict, List, Union, Iterable, Optional -from typing_extensions import Literal, Required, TypeAlias, TypedDict - -from .chat_completion_tool_param import ChatCompletionToolParam -from .chat_completion_message_param import ChatCompletionMessageParam -from ..shared_params.function_parameters import FunctionParameters -from .chat_completion_tool_choice_option_param import ChatCompletionToolChoiceOptionParam -from .chat_completion_function_call_option_param import ChatCompletionFunctionCallOptionParam - -__all__ = ["CompletionCreateParams", "FunctionCall", "Function", "ResponseFormat"] - - -class CompletionCreateParams(TypedDict, total=False): - messages: Required[Iterable[ChatCompletionMessageParam]] - """A list of messages comprising the conversation so far.""" - - model: Required[Union[str, Literal["gemma-7b-it", "llama3-70b-8192", "llama3-8b-8192", "mixtral-8x7b-32768"]]] - """ID of the model to use. - - For details on which models are compatible with the Chat API, see available - [models](/docs/models) - """ - - frequency_penalty: Optional[float] - """Number between -2.0 and 2.0. - - Positive values penalize new tokens based on their existing frequency in the - text so far, decreasing the model's likelihood to repeat the same line verbatim. - """ - - function_call: Optional[FunctionCall] - """Deprecated in favor of `tool_choice`. - - Controls which (if any) function is called by the model. `none` means the model - will not call a function and instead generates a message. 
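`ChatCompletionToolParam` above, together with the named tool-choice param, covers the request side of tool use. A sketch that forces one particular function; the function name and its JSON Schema are illustrative:

    from groq import Groq

    client = Groq()
    tools = [{
        "type": "function",
        "function": {                 # FunctionDefinition
            "name": "get_weather",    # illustrative name
            "description": "Look up the current weather for a city.",
            "parameters": {           # JSON Schema, per FunctionParameters
                "type": "object",
                "properties": {"city": {"type": "string"}},
                "required": ["city"],
            },
        },
    }]
    completion = client.chat.completions.create(
        model="llama3-70b-8192",
        messages=[{"role": "user", "content": "Weather in Oslo?"}],
        tools=tools,  # up to 128 functions are supported
        tool_choice={"type": "function",
                     "function": {"name": "get_weather"}},  # forces this tool
    )
    print(completion.choices[0].message.tool_calls)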
`auto` means the model - can pick between generating a message or calling a function. Specifying a - particular function via `{"name": "my_function"}` forces the model to call that - function. - - `none` is the default when no functions are present. `auto` is the default if - functions are present. - """ - - functions: Optional[Iterable[Function]] - """Deprecated in favor of `tools`. - - A list of functions the model may generate JSON inputs for. - """ - - logit_bias: Optional[Dict[str, int]] - """ - This is not yet supported by any of our models. Modify the likelihood of - specified tokens appearing in the completion. - """ - - logprobs: Optional[bool] - """ - This is not yet supported by any of our models. Whether to return log - probabilities of the output tokens or not. If true, returns the log - probabilities of each output token returned in the `content` of `message`. - """ - - max_tokens: Optional[int] - """The maximum number of tokens that can be generated in the chat completion. - - The total length of input tokens and generated tokens is limited by the model's - context length. - """ - - n: Optional[int] - """How many chat completion choices to generate for each input message. - - Note that the current moment, only n=1 is supported. Other values will result in - a 400 response. - """ - - parallel_tool_calls: Optional[bool] - """Whether to enable parallel function calling during tool use.""" - - presence_penalty: Optional[float] - """Number between -2.0 and 2.0. - - Positive values penalize new tokens based on whether they appear in the text so - far, increasing the model's likelihood to talk about new topics. - """ - - response_format: Optional[ResponseFormat] - """An object specifying the format that the model must output. - - Setting to `{ "type": "json_object" }` enables JSON mode, which guarantees the - message the model generates is valid JSON. - - **Important:** when using JSON mode, you **must** also instruct the model to - produce JSON yourself via a system or user message. - """ - - seed: Optional[int] - """ - If specified, our system will make a best effort to sample deterministically, - such that repeated requests with the same `seed` and parameters should return - the same result. Determinism is not guaranteed, and you should refer to the - `system_fingerprint` response parameter to monitor changes in the backend. - """ - - stop: Union[Optional[str], List[str], None] - """Up to 4 sequences where the API will stop generating further tokens. - - The returned text will not contain the stop sequence. - """ - - stream: Optional[bool] - """If set, partial message deltas will be sent. - - Tokens will be sent as data-only - [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) - as they become available, with the stream terminated by a `data: [DONE]` - message. [Example code](/docs/text-chat#streaming-a-chat-completion). - """ - - temperature: Optional[float] - """What sampling temperature to use, between 0 and 2. - - Higher values like 0.8 will make the output more random, while lower values like - 0.2 will make it more focused and deterministic. We generally recommend altering - this or top_p but not both - """ - - tool_choice: Optional[ChatCompletionToolChoiceOptionParam] - """ - Controls which (if any) tool is called by the model. `none` means the model will - not call any tool and instead generates a message. 
`auto` means the model can - pick between generating a message or calling one or more tools. `required` means - the model must call one or more tools. Specifying a particular tool via - `{"type": "function", "function": {"name": "my_function"}}` forces the model to - call that tool. - - `none` is the default when no tools are present. `auto` is the default if tools - are present. - """ - - tools: Optional[Iterable[ChatCompletionToolParam]] - """A list of tools the model may call. - - Currently, only functions are supported as a tool. Use this to provide a list of - functions the model may generate JSON inputs for. A max of 128 functions are - supported. - """ - - top_logprobs: Optional[int] - """ - This is not yet supported by any of our models. An integer between 0 and 20 - specifying the number of most likely tokens to return at each token position, - each with an associated log probability. `logprobs` must be set to `true` if - this parameter is used. - """ - - top_p: Optional[float] - """ - An alternative to sampling with temperature, called nucleus sampling, where the - model considers the results of the tokens with top_p probability mass. So 0.1 - means only the tokens comprising the top 10% probability mass are considered. We - generally recommend altering this or temperature but not both. - """ - - user: Optional[str] - """ - A unique identifier representing your end-user, which can help us monitor and - detect abuse. - """ - - -FunctionCall: TypeAlias = Union[Literal["none", "auto", "required"], ChatCompletionFunctionCallOptionParam] - - -class Function(TypedDict, total=False): - name: Required[str] - """The name of the function to be called. - - Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length - of 64. - """ - - description: str - """ - A description of what the function does, used by the model to choose when and - how to call the function. - """ - - parameters: FunctionParameters - """The parameters the function accepts, described as a JSON Schema object. - - See the docs on [tool use](/docs/tool-use) for examples, and the - [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for - documentation about the format. - - Omitting `parameters` defines a function with an empty parameter list. - """ - - -class ResponseFormat(TypedDict, total=False): - type: Literal["text", "json_object"] - """Must be one of `text` or `json_object`."""
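Taken together, the TypedDicts above describe the JSON payload of a chat-completion request. As a minimal sketch of how they map onto an actual call (assuming the `groq` package is installed and `GROQ_API_KEY` is set in the environment; the model ID and prompt below are illustrative placeholders, not recommendations):

.. code-block:: python

    import os

    from groq import Groq

    # Sketch only: the keyword arguments mirror the fields documented in
    # completion_create_params.py above.
    client = Groq(api_key=os.environ["GROQ_API_KEY"])

    completion = client.chat.completions.create(
        model="llama3-8b-8192",  # one of the Literal model IDs listed above
        messages=[{"role": "user", "content": "Say hello in one word."}],
        temperature=0.2,  # sampling controls documented above
        max_tokens=16,
        stream=False,
    )
    print(completion.choices[0].message.content)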
diff --git a/myenv/Lib/site-packages/groq/types/completion_usage.py b/myenv/Lib/site-packages/groq/types/completion_usage.py deleted file mode 100644 index 5ca4ff3..0000000 --- a/myenv/Lib/site-packages/groq/types/completion_usage.py +++ /dev/null @@ -1,30 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Optional - -from .._models import BaseModel - -__all__ = ["CompletionUsage"] - - -class CompletionUsage(BaseModel): - completion_tokens: int - """Number of tokens in the generated completion.""" - - prompt_tokens: int - """Number of tokens in the prompt.""" - - total_tokens: int - """Total number of tokens used in the request (prompt + completion).""" - - completion_time: Optional[float] = None - """Time spent generating tokens""" - - prompt_time: Optional[float] = None - """Time spent processing input tokens""" - - queue_time: Optional[float] = None - """Time the request spent queued""" - - total_time: Optional[float] = None - """Completion time and prompt time combined""" diff --git a/myenv/Lib/site-packages/groq/types/create_embedding_response.py b/myenv/Lib/site-packages/groq/types/create_embedding_response.py deleted file mode 100644 index eff247a..0000000 --- a/myenv/Lib/site-packages/groq/types/create_embedding_response.py +++ /dev/null @@ -1,31 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List -from typing_extensions import Literal - -from .._models import BaseModel -from .embedding import Embedding - -__all__ = ["CreateEmbeddingResponse", "Usage"] - - -class Usage(BaseModel): - prompt_tokens: int - """The number of tokens used by the prompt.""" - - total_tokens: int - """The total number of tokens used by the request.""" - - -class CreateEmbeddingResponse(BaseModel): - data: List[Embedding] - """The list of embeddings generated by the model.""" - - model: str - """The name of the model used to generate the embedding.""" - - object: Literal["list"] - """The object type, which is always "list".""" - - usage: Usage - """The usage information for the request.""" diff --git a/myenv/Lib/site-packages/groq/types/embedding.py b/myenv/Lib/site-packages/groq/types/embedding.py deleted file mode 100644 index 6dad3c3..0000000 --- a/myenv/Lib/site-packages/groq/types/embedding.py +++ /dev/null @@ -1,23 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List, Union -from typing_extensions import Literal - -from .._models import BaseModel - -__all__ = ["Embedding"] - - -class Embedding(BaseModel): - embedding: Union[List[float], str] - """The embedding vector, which is a list of floats. - - The length of the vector depends on the model as listed in the - [embedding guide](/docs/guides/embeddings). - """ - - index: int - """The index of the embedding in the list of embeddings.""" - - object: Literal["embedding"] - """The object type, which is always "embedding".""" diff --git a/myenv/Lib/site-packages/groq/types/embedding_create_params.py b/myenv/Lib/site-packages/groq/types/embedding_create_params.py deleted file mode 100644 index 67a5991..0000000 --- a/myenv/Lib/site-packages/groq/types/embedding_create_params.py +++ /dev/null @@ -1,30 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import List, Union, Optional -from typing_extensions import Literal, Required, TypedDict - -__all__ = ["EmbeddingCreateParams"] - - -class EmbeddingCreateParams(TypedDict, total=False): - input: Required[Union[str, List[str]]] - """Input text to embed, encoded as a string or array of tokens. - - To embed multiple inputs in a single request, pass an array of strings or array - of token arrays.
The input must not exceed the max input tokens for the model, - cannot be an empty string, and any array must be 2048 dimensions or less. - """ - - model: Required[Union[str, Literal["nomic-embed-text-v1_5"]]] - """ID of the model to use.""" - - encoding_format: Literal["float", "base64"] - """The format to return the embeddings in. Can only be `float` or `base64`.""" - - user: Optional[str] - """ - A unique identifier representing your end-user, which can help us monitor and - detect abuse. - """ diff --git a/myenv/Lib/site-packages/groq/types/model.py b/myenv/Lib/site-packages/groq/types/model.py deleted file mode 100644 index 2631ee8..0000000 --- a/myenv/Lib/site-packages/groq/types/model.py +++ /dev/null @@ -1,21 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing_extensions import Literal - -from .._models import BaseModel - -__all__ = ["Model"] - - -class Model(BaseModel): - id: str - """The model identifier, which can be referenced in the API endpoints.""" - - created: int - """The Unix timestamp (in seconds) when the model was created.""" - - object: Literal["model"] - """The object type, which is always "model".""" - - owned_by: str - """The organization that owns the model.""" diff --git a/myenv/Lib/site-packages/groq/types/model_deleted.py b/myenv/Lib/site-packages/groq/types/model_deleted.py deleted file mode 100644 index d9a48bb..0000000 --- a/myenv/Lib/site-packages/groq/types/model_deleted.py +++ /dev/null @@ -1,15 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - - - -from .._models import BaseModel - -__all__ = ["ModelDeleted"] - - -class ModelDeleted(BaseModel): - id: str - - deleted: bool - - object: str diff --git a/myenv/Lib/site-packages/groq/types/model_list_response.py b/myenv/Lib/site-packages/groq/types/model_list_response.py deleted file mode 100644 index 8f83544..0000000 --- a/myenv/Lib/site-packages/groq/types/model_list_response.py +++ /dev/null @@ -1,15 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import List -from typing_extensions import Literal - -from .model import Model -from .._models import BaseModel - -__all__ = ["ModelListResponse"] - - -class ModelListResponse(BaseModel): - data: List[Model] - - object: Literal["list"] diff --git a/myenv/Lib/site-packages/groq/types/shared/__init__.py b/myenv/Lib/site-packages/groq/types/shared/__init__.py deleted file mode 100644 index e085744..0000000 --- a/myenv/Lib/site-packages/groq/types/shared/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from .error_object import ErrorObject as ErrorObject -from .function_definition import FunctionDefinition as FunctionDefinition -from .function_parameters import FunctionParameters as FunctionParameters diff --git a/myenv/Lib/site-packages/groq/types/shared/error_object.py b/myenv/Lib/site-packages/groq/types/shared/error_object.py deleted file mode 100644 index 32d7045..0000000 --- a/myenv/Lib/site-packages/groq/types/shared/error_object.py +++ /dev/null @@ -1,17 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. 
- -from typing import Optional - -from ..._models import BaseModel - -__all__ = ["ErrorObject"] - - -class ErrorObject(BaseModel): - code: Optional[str] = None - - message: str - - param: Optional[str] = None - - type: str diff --git a/myenv/Lib/site-packages/groq/types/shared/function_definition.py b/myenv/Lib/site-packages/groq/types/shared/function_definition.py deleted file mode 100644 index 8abd62a..0000000 --- a/myenv/Lib/site-packages/groq/types/shared/function_definition.py +++ /dev/null @@ -1,33 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Optional - -from ..._models import BaseModel -from .function_parameters import FunctionParameters - -__all__ = ["FunctionDefinition"] - - -class FunctionDefinition(BaseModel): - name: str - """The name of the function to be called. - - Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length - of 64. - """ - - description: Optional[str] = None - """ - A description of what the function does, used by the model to choose when and - how to call the function. - """ - - parameters: Optional[FunctionParameters] = None - """The parameters the function accepts, described as a JSON Schema object. - - See the docs on [tool use](/docs/tool-use) for examples, and the - [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for - documentation about the format. - - Omitting `parameters` defines a function with an empty parameter list. - """ diff --git a/myenv/Lib/site-packages/groq/types/shared/function_parameters.py b/myenv/Lib/site-packages/groq/types/shared/function_parameters.py deleted file mode 100644 index a3d83e3..0000000 --- a/myenv/Lib/site-packages/groq/types/shared/function_parameters.py +++ /dev/null @@ -1,8 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from typing import Dict -from typing_extensions import TypeAlias - -__all__ = ["FunctionParameters"] - -FunctionParameters: TypeAlias = Dict[str, object] diff --git a/myenv/Lib/site-packages/groq/types/shared_params/__init__.py b/myenv/Lib/site-packages/groq/types/shared_params/__init__.py deleted file mode 100644 index ef638cb..0000000 --- a/myenv/Lib/site-packages/groq/types/shared_params/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from .function_definition import FunctionDefinition as FunctionDefinition -from .function_parameters import FunctionParameters as FunctionParameters diff --git a/myenv/Lib/site-packages/groq/types/shared_params/function_definition.py b/myenv/Lib/site-packages/groq/types/shared_params/function_definition.py deleted file mode 100644 index 0799867..0000000 --- a/myenv/Lib/site-packages/groq/types/shared_params/function_definition.py +++ /dev/null @@ -1,34 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing_extensions import Required, TypedDict - -from .function_parameters import FunctionParameters - -__all__ = ["FunctionDefinition"] - - -class FunctionDefinition(TypedDict, total=False): - name: Required[str] - """The name of the function to be called. - - Must be a-z, A-Z, 0-9, or contain underscores and dashes, with a maximum length - of 64. - """ - - description: str - """ - A description of what the function does, used by the model to choose when and - how to call the function.
- """ - - parameters: FunctionParameters - """The parameters the function accepts, described as a JSON Schema object. - - See the docs on [tool use](/docs/tool-use) for examples, and the - [JSON Schema reference](https://json-schema.org/understanding-json-schema/) for - documentation about the format. - - Omitting `parameters` defines a function with an empty parameter list. - """ diff --git a/myenv/Lib/site-packages/groq/types/shared_params/function_parameters.py b/myenv/Lib/site-packages/groq/types/shared_params/function_parameters.py deleted file mode 100644 index 45fc742..0000000 --- a/myenv/Lib/site-packages/groq/types/shared_params/function_parameters.py +++ /dev/null @@ -1,10 +0,0 @@ -# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details. - -from __future__ import annotations - -from typing import Dict -from typing_extensions import TypeAlias - -__all__ = ["FunctionParameters"] - -FunctionParameters: TypeAlias = Dict[str, object] diff --git a/myenv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER b/myenv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/h11-0.14.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt b/myenv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt deleted file mode 100644 index 8f080ea..0000000 --- a/myenv/Lib/site-packages/h11-0.14.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,22 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 Nathaniel J. Smith and other contributors - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE -LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION -WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/myenv/Lib/site-packages/h11-0.14.0.dist-info/METADATA b/myenv/Lib/site-packages/h11-0.14.0.dist-info/METADATA deleted file mode 100644 index cf12a82..0000000 --- a/myenv/Lib/site-packages/h11-0.14.0.dist-info/METADATA +++ /dev/null @@ -1,193 +0,0 @@ -Metadata-Version: 2.1 -Name: h11 -Version: 0.14.0 -Summary: A pure-Python, bring-your-own-I/O implementation of HTTP/1.1 -Home-page: https://github.com/python-hyper/h11 -Author: Nathaniel J.
Smith -Author-email: njs@pobox.com -License: MIT -Classifier: Development Status :: 3 - Alpha -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: MIT License -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Topic :: Internet :: WWW/HTTP -Classifier: Topic :: System :: Networking -Requires-Python: >=3.7 -License-File: LICENSE.txt -Requires-Dist: typing-extensions ; python_version < "3.8" - -h11 -=== - -.. image:: https://travis-ci.org/python-hyper/h11.svg?branch=master - :target: https://travis-ci.org/python-hyper/h11 - :alt: Automated test status - -.. image:: https://codecov.io/gh/python-hyper/h11/branch/master/graph/badge.svg - :target: https://codecov.io/gh/python-hyper/h11 - :alt: Test coverage - -.. image:: https://readthedocs.org/projects/h11/badge/?version=latest - :target: http://h11.readthedocs.io/en/latest/?badge=latest - :alt: Documentation Status - -This is a little HTTP/1.1 library written from scratch in Python, -heavily inspired by `hyper-h2 `_. - -It's a "bring-your-own-I/O" library; h11 contains no IO code -whatsoever. This means you can hook h11 up to your favorite network -API, and that could be anything you want: synchronous, threaded, -asynchronous, or your own implementation of `RFC 6214 -`_ -- h11 won't judge you. -(Compare this to the current state of the art, where every time a `new -network API `_ comes along then someone -gets to start over reimplementing the entire HTTP protocol from -scratch.) Cory Benfield made an `excellent blog post describing the -benefits of this approach -`_, or if you like video -then here's his `PyCon 2016 talk on the same theme -`_. - -This also means that h11 is not immediately useful out of the box: -it's a toolkit for building programs that speak HTTP, not something -that could directly replace ``requests`` or ``twisted.web`` or -whatever. But h11 makes it much easier to implement something like -``requests`` or ``twisted.web``. - -At a high level, working with h11 goes like this: - -1) First, create an ``h11.Connection`` object to track the state of a - single HTTP/1.1 connection. - -2) When you read data off the network, pass it to - ``conn.receive_data(...)``; you'll get back a list of objects - representing high-level HTTP "events". - -3) When you want to send a high-level HTTP event, create the - corresponding "event" object and pass it to ``conn.send(...)``; - this will give you back some bytes that you can then push out - through the network. - -For example, a client might instantiate and then send a -``h11.Request`` object, then zero or more ``h11.Data`` objects for the -request body (e.g., if this is a POST), and then an -``h11.EndOfMessage`` to indicate the end of the message. Then the -server would send back a ``h11.Response``, some ``h11.Data``, and -its own ``h11.EndOfMessage``. If either side violates the protocol, -you'll get a ``h11.ProtocolError`` exception. - -h11 is suitable for implementing both servers and clients, and has a -pleasantly symmetric API: the events you send as a client are exactly -the ones that you receive as a server and vice-versa.
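The three numbered steps above translate almost line-for-line into code. A minimal client sketch (standard library plus h11 only; error handling omitted, and the host name is a placeholder):

.. code-block:: python

    import socket

    import h11

    conn = h11.Connection(our_role=h11.CLIENT)
    sock = socket.create_connection(("example.com", 80))

    # Step 3: turn high-level events into bytes and push them out ourselves.
    request = h11.Request(
        method="GET",
        target="/",
        headers=[("Host", "example.com"), ("Connection", "close")],
    )
    sock.sendall(conn.send(request))
    sock.sendall(conn.send(h11.EndOfMessage()))

    # Steps 1 and 2: feed raw bytes in, get high-level events back out.
    while True:
        event = conn.next_event()
        if event is h11.NEED_DATA:
            conn.receive_data(sock.recv(4096))
        elif isinstance(event, h11.Data):
            print(event.data.decode("latin-1"), end="")
        elif isinstance(event, (h11.EndOfMessage, h11.ConnectionClosed)):
            break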
- -`Here's an example of a tiny HTTP client -`_ - -It also has `a fine manual `_. - -FAQ ---- - -*Whyyyyy?* - -I wanted to play with HTTP in `Curio -`__ and `Trio -`__, which at the time didn't have any -HTTP libraries. So I thought, no big deal, Python has, like, a dozen -different implementations of HTTP, surely I can find one that's -reusable. I didn't find one, but I did find Cory's call-to-arms -blog-post. So I figured, well, fine, if I have to implement HTTP from -scratch, at least I can make sure no-one *else* has to ever again. - -*Should I use it?* - -Maybe. You should be aware that it's a very young project. But, it's -feature complete and has an exhaustive test-suite and complete docs, -so the next step is for people to try using it and see how it goes -:-). If you do then please let us know -- if nothing else we'll want -to talk to you before making any incompatible changes! - -*What are the features/limitations?* - -Roughly speaking, it's trying to be a robust, complete, and non-hacky -implementation of the first "chapter" of the HTTP/1.1 spec: `RFC 7230: -HTTP/1.1 Message Syntax and Routing -`_. That is, it mostly focuses on -implementing HTTP at the level of taking bytes on and off the wire, -and the headers related to that, and tries to be anal about spec -conformance. It doesn't know about higher-level concerns like URL -routing, conditional GETs, cross-origin cookie policies, or content -negotiation. But it does know how to take care of framing, -cross-version differences in keep-alive handling, and the "obsolete -line folding" rule, so you can focus your energies on the hard / -interesting parts for your application, and it tries to support the -full specification in the sense that any useful HTTP/1.1 conformant -application should be able to use h11. - -It's pure Python, and has no dependencies outside of the standard -library. - -It has a test suite with 100.0% coverage for both statements and -branches. - -Currently it supports Python 3 (testing on 3.7-3.10) and PyPy 3. -The last Python 2-compatible version was h11 0.11.x. -(Originally it had a Cython wrapper for `http-parser -`_ and a beautiful nested state -machine implemented with ``yield from`` to postprocess the output. But -I had to take these out -- the new *parser* needs fewer lines-of-code -than the old *parser wrapper*, is written in pure Python, uses no -exotic language syntax, and has more features. It's sad, really; that -old state machine was really slick. I just need a few sentences here -to mourn that.) - -I don't know how fast it is. I haven't benchmarked or profiled it yet, -so it's probably got a few pointless hot spots, and I've been trying -to err on the side of simplicity and robustness instead of -micro-optimization. But at the architectural level I tried hard to -avoid fundamentally bad decisions, e.g., I believe that all the -parsing algorithms remain linear-time even in the face of pathological -input like slowloris, and there are no byte-by-byte loops. (I also -believe that it maintains bounded memory usage in the face of -arbitrary/pathological input.) - -The whole library is ~800 lines-of-code. You can read and understand -the whole thing in less than an hour. Most of the energy invested in -this so far has been spent on trying to keep things simple by -minimizing special-cases and ad hoc state manipulation; even though it -is now quite small and simple, I'm still annoyed that I haven't -figured out how to make it even smaller and simpler. 
(Unfortunately, -HTTP does not lend itself to simplicity.) - -The API is ~feature complete and I don't expect the general outlines -to change much, but you can't judge an API's ergonomics until you -actually document and use it, so I'd expect some changes in the -details. - -*How do I try it?* - -.. code-block:: sh - - $ pip install h11 - $ git clone git@github.com:python-hyper/h11 - $ cd h11/examples - $ python basic-client.py - -and go from there. - -*License?* - -MIT - -*Code of conduct?* - -Contributors are requested to follow our `code of conduct -`_ in -all project spaces. diff --git a/myenv/Lib/site-packages/h11-0.14.0.dist-info/RECORD b/myenv/Lib/site-packages/h11-0.14.0.dist-info/RECORD deleted file mode 100644 index 7fdd1f6..0000000 --- a/myenv/Lib/site-packages/h11-0.14.0.dist-info/RECORD +++ /dev/null @@ -1,52 +0,0 @@ -h11-0.14.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -h11-0.14.0.dist-info/LICENSE.txt,sha256=N9tbuFkm2yikJ6JYZ_ELEjIAOuob5pzLhRE4rbjm82E,1124 -h11-0.14.0.dist-info/METADATA,sha256=B7pZ0m7WBXNs17vl6hUH9bJTL9s37DaGvY31w7jNxSg,8175 -h11-0.14.0.dist-info/RECORD,, -h11-0.14.0.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 -h11-0.14.0.dist-info/top_level.txt,sha256=F7dC4jl3zeh8TGHEPaWJrMbeuoWbS379Gwdi-Yvdcis,4 -h11/__init__.py,sha256=iO1KzkSO42yZ6ffg-VMgbx_ZVTWGUY00nRYEWn-s3kY,1507 -h11/__pycache__/__init__.cpython-312.pyc,, -h11/__pycache__/_abnf.cpython-312.pyc,, -h11/__pycache__/_connection.cpython-312.pyc,, -h11/__pycache__/_events.cpython-312.pyc,, -h11/__pycache__/_headers.cpython-312.pyc,, -h11/__pycache__/_readers.cpython-312.pyc,, -h11/__pycache__/_receivebuffer.cpython-312.pyc,, -h11/__pycache__/_state.cpython-312.pyc,, -h11/__pycache__/_util.cpython-312.pyc,, -h11/__pycache__/_version.cpython-312.pyc,, -h11/__pycache__/_writers.cpython-312.pyc,, -h11/_abnf.py,sha256=ybixr0xsupnkA6GFAyMubuXF6Tc1lb_hF890NgCsfNc,4815 -h11/_connection.py,sha256=eS2sorMD0zKLCFiB9lW9W9F_Nzny2tjHa4e6s1ujr1c,26539 -h11/_events.py,sha256=LEfuvg1AbhHaVRwxCd0I-pFn9-ezUOaoL8o2Kvy1PBA,11816 -h11/_headers.py,sha256=RqB8cd8CN0blYPzcLe5qeCh-phv6D1U_CHj4hs67lgQ,10230 -h11/_readers.py,sha256=EbSed0jzwVUiD1nOPAeUcVE4Flf3wXkxfb8c06-OTBM,8383 -h11/_receivebuffer.py,sha256=xrspsdsNgWFxRfQcTXxR8RrdjRXXTK0Io5cQYWpJ1Ws,5252 -h11/_state.py,sha256=k1VL6SDbaPkSrZ-49ewCXDpuiUS69_46YhbWjuV1qEY,13300 -h11/_util.py,sha256=LWkkjXyJaFlAy6Lt39w73UStklFT5ovcvo0TkY7RYuk,4888 -h11/_version.py,sha256=LVyTdiZRzIIEv79UyOgbM5iUrJUllEzlCWaJEYBY1zc,686 -h11/_writers.py,sha256=oFKm6PtjeHfbj4RLX7VB7KDc1gIY53gXG3_HR9ltmTA,5081 -h11/py.typed,sha256=sow9soTwP9T_gEAQSVh7Gb8855h04Nwmhs2We-JRgZM,7 -h11/tests/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -h11/tests/__pycache__/__init__.cpython-312.pyc,, -h11/tests/__pycache__/helpers.cpython-312.pyc,, -h11/tests/__pycache__/test_against_stdlib_http.cpython-312.pyc,, -h11/tests/__pycache__/test_connection.cpython-312.pyc,, -h11/tests/__pycache__/test_events.cpython-312.pyc,, -h11/tests/__pycache__/test_headers.cpython-312.pyc,, -h11/tests/__pycache__/test_helpers.cpython-312.pyc,, -h11/tests/__pycache__/test_io.cpython-312.pyc,, -h11/tests/__pycache__/test_receivebuffer.cpython-312.pyc,, -h11/tests/__pycache__/test_state.cpython-312.pyc,, -h11/tests/__pycache__/test_util.cpython-312.pyc,, -h11/tests/data/test-file,sha256=ZJ03Rqs98oJw29OHzJg7LlMzyGQaRAY0r3AqBeM2wVU,65 -h11/tests/helpers.py,sha256=a1EVG_p7xU4wRsa3tMPTRxuaKCmretok9sxXWvqfmQA,3355 
-h11/tests/test_against_stdlib_http.py,sha256=cojCHgHXFQ8gWhNlEEwl3trmOpN-5uDukRoHnElqo3A,3995 -h11/tests/test_connection.py,sha256=ZbPLDPclKvjgjAhgk-WlCPBaf17c4XUIV2tpaW08jOI,38720 -h11/tests/test_events.py,sha256=LPVLbcV-NvPNK9fW3rraR6Bdpz1hAlsWubMtNaJ5gHg,4657 -h11/tests/test_headers.py,sha256=qd8T1Zenuz5GbD6wklSJ5G8VS7trrYgMV0jT-SMvqg8,5612 -h11/tests/test_helpers.py,sha256=kAo0CEM4LGqmyyP2ZFmhsyq3UFJqoFfAbzu3hbWreRM,794 -h11/tests/test_io.py,sha256=uCZVnjarkRBkudfC1ij-KSCQ71XWJhnkgkgWWkKgYPQ,16386 -h11/tests/test_receivebuffer.py,sha256=3jGbeJM36Akqg_pAhPb7XzIn2NS6RhPg-Ryg8Eu6ytk,3454 -h11/tests/test_state.py,sha256=rqll9WqFsJPE0zSrtCn9LH659mPKsDeXZ-DwXwleuBQ,8928 -h11/tests/test_util.py,sha256=VO5L4nSFe4pgtSwKuv6u_6l0H7UeizF5WKuHTWreg70,2970 diff --git a/myenv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL b/myenv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL deleted file mode 100644 index 5bad85f..0000000 --- a/myenv/Lib/site-packages/h11-0.14.0.dist-info/WHEEL +++ /dev/null @@ -1,5 +0,0 @@ -Wheel-Version: 1.0 -Generator: bdist_wheel (0.37.0) -Root-Is-Purelib: true -Tag: py3-none-any - diff --git a/myenv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt b/myenv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt deleted file mode 100644 index 0d24def..0000000 --- a/myenv/Lib/site-packages/h11-0.14.0.dist-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -h11 diff --git a/myenv/Lib/site-packages/h11/__init__.py b/myenv/Lib/site-packages/h11/__init__.py deleted file mode 100644 index 989e92c..0000000 --- a/myenv/Lib/site-packages/h11/__init__.py +++ /dev/null @@ -1,62 +0,0 @@ -# A highish-level implementation of the HTTP/1.1 wire protocol (RFC 7230), -# containing no networking code at all, loosely modelled on hyper-h2's generic -# implementation of HTTP/2 (and in particular the h2.connection.H2Connection -# class). There's still a bunch of subtle details you need to get right if you -# want to make this actually useful, because it doesn't implement all the -# semantics to check that what you're asking to write to the wire is sensible, -# but at least it gets you out of dealing with the wire itself. - -from h11._connection import Connection, NEED_DATA, PAUSED -from h11._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from h11._state import ( - CLIENT, - CLOSED, - DONE, - ERROR, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from h11._util import LocalProtocolError, ProtocolError, RemoteProtocolError -from h11._version import __version__ - -PRODUCT_ID = "python-h11/" + __version__ - - -__all__ = ( - "Connection", - "NEED_DATA", - "PAUSED", - "ConnectionClosed", - "Data", - "EndOfMessage", - "Event", - "InformationalResponse", - "Request", - "Response", - "CLIENT", - "CLOSED", - "DONE", - "ERROR", - "IDLE", - "MUST_CLOSE", - "SEND_BODY", - "SEND_RESPONSE", - "SERVER", - "SWITCHED_PROTOCOL", - "ProtocolError", - "LocalProtocolError", - "RemoteProtocolError", -) diff --git a/myenv/Lib/site-packages/h11/_abnf.py b/myenv/Lib/site-packages/h11/_abnf.py deleted file mode 100644 index 933587f..0000000 --- a/myenv/Lib/site-packages/h11/_abnf.py +++ /dev/null @@ -1,132 +0,0 @@ -# We use native strings for all the re patterns, to take advantage of string -# formatting, and then convert to bytestrings when compiling the final re -# objects. 
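The idiom this comment describes -- compose the grammar as native strings so ``str.format`` can interpolate the pieces, then ``.encode("ascii")`` only when compiling the final pattern -- looks roughly like this in use (a simplified sketch, not h11's actual definitions):

.. code-block:: python

    import re

    # Native-string fragments compose cleanly with str.format()...
    token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+"
    header_field = r"(?P<field_name>{token}):[ \t]*(?P<field_value>[^\r\n]*)".format(token=token)

    # ...and are converted to bytes only when compiling the final regex,
    # since HTTP on the wire is bytes, not text.
    header_field_re = re.compile(header_field.encode("ascii"))

    m = header_field_re.match(b"Content-Type: text/plain")
    assert m is not None
    assert m.group("field_name") == b"Content-Type"
    assert m.group("field_value") == b"text/plain"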
- -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#whitespace -# OWS = *( SP / HTAB ) -# ; optional whitespace -OWS = r"[ \t]*" - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.token.separators -# token = 1*tchar -# -# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" -# / "+" / "-" / "." / "^" / "_" / "`" / "|" / "~" -# / DIGIT / ALPHA -# ; any VCHAR, except delimiters -token = r"[-!#$%&'*+.^_`|~0-9a-zA-Z]+" - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#header.fields -# field-name = token -field_name = token - -# The standard says: -# -# field-value = *( field-content / obs-fold ) -# field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] -# field-vchar = VCHAR / obs-text -# obs-fold = CRLF 1*( SP / HTAB ) -# ; obsolete line folding -# ; see Section 3.2.4 -# -# https://tools.ietf.org/html/rfc5234#appendix-B.1 -# -# VCHAR = %x21-7E -# ; visible (printing) characters -# -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#rule.quoted-string -# obs-text = %x80-FF -# -# However, the standard definition of field-content is WRONG! It disallows -# fields containing a single visible character surrounded by whitespace, -# e.g. "foo a bar". -# -# See: https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189 -# -# So our definition of field_content attempts to fix it up... -# -# Also, we allow lots of control characters, because apparently people assume -# that they're legal in practice (e.g., google analytics makes cookies with -# \x01 in them!): -# https://github.com/python-hyper/h11/issues/57 -# We still don't allow NUL or whitespace, because those are often treated as -# meta-characters and letting them through can lead to nasty issues like SSRF. -vchar = r"[\x21-\x7e]" -vchar_or_obs_text = r"[^\x00\s]" -field_vchar = vchar_or_obs_text -field_content = r"{field_vchar}+(?:[ \t]+{field_vchar}+)*".format(**globals()) - -# We handle obs-fold at a different level, and our fixed-up field_content -# already grows to swallow the whole value, so ? instead of * -field_value = r"({field_content})?".format(**globals()) - -# header-field = field-name ":" OWS field-value OWS -header_field = ( - r"(?P<field_name>{field_name})" - r":" - r"{OWS}" - r"(?P<field_value>{field_value})" - r"{OWS}".format(**globals()) -) - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#request.line -# -# request-line = method SP request-target SP HTTP-version CRLF -# method = token -# HTTP-version = HTTP-name "/" DIGIT "." DIGIT -# HTTP-name = %x48.54.54.50 ; "HTTP", case-sensitive -# -# request-target is complicated (see RFC 7230 sec 5.3) -- could be path, full -# URL, host+port (for connect), or even "*", but in any case we are guaranteed -# that it consists of the visible printing characters.
-method = token -request_target = r"{vchar}+".format(**globals()) -http_version = r"HTTP/(?P<http_version>[0-9]\.[0-9])" -request_line = ( - r"(?P<method>{method})" - r" " - r"(?P<target>{request_target})" - r" " - r"{http_version}".format(**globals()) -) - -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#status.line -# -# status-line = HTTP-version SP status-code SP reason-phrase CRLF -# status-code = 3DIGIT -# reason-phrase = *( HTAB / SP / VCHAR / obs-text ) -status_code = r"[0-9]{3}" -reason_phrase = r"([ \t]|{vchar_or_obs_text})*".format(**globals()) -status_line = ( - r"{http_version}" - r" " - r"(?P<status_code>{status_code})" - # However, there are apparently a few too many servers out there that just - # leave out the reason phrase: - # https://github.com/scrapy/scrapy/issues/345#issuecomment-281756036 - # https://github.com/seanmonstar/httparse/issues/29 - # so make it optional. ?: is a non-capturing group. - r"(?: (?P<reason>{reason_phrase}))?".format(**globals()) -) - -HEXDIG = r"[0-9A-Fa-f]" -# Actually -# -# chunk-size = 1*HEXDIG -# -# but we impose an upper-limit to avoid ridiculosity. len(str(2**64)) == 20 -chunk_size = r"({HEXDIG}){{1,20}}".format(**globals()) -# Actually -# -# chunk-ext = *( ";" chunk-ext-name [ "=" chunk-ext-val ] ) -# -# but we aren't parsing the things so we don't really care. -chunk_ext = r";.*" -chunk_header = ( - r"(?P<chunk_size>{chunk_size})" - r"(?P<chunk_ext>{chunk_ext})?" - r"{OWS}\r\n".format( - **globals() - ) # Even though the specification does not allow for extra whitespaces, - # we are lenient with trailing whitespaces because some servers in the wild use it. -) diff --git a/myenv/Lib/site-packages/h11/_connection.py b/myenv/Lib/site-packages/h11/_connection.py deleted file mode 100644 index d175270..0000000 --- a/myenv/Lib/site-packages/h11/_connection.py +++ /dev/null @@ -1,633 +0,0 @@ -# This contains the main Connection class. Everything in h11 revolves around -# this. -from typing import Any, Callable, cast, Dict, List, Optional, Tuple, Type, Union - -from ._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from ._headers import get_comma_header, has_expect_100_continue, set_comma_header -from ._readers import READERS, ReadersType -from ._receivebuffer import ReceiveBuffer -from ._state import ( - _SWITCH_CONNECT, - _SWITCH_UPGRADE, - CLIENT, - ConnectionState, - DONE, - ERROR, - MIGHT_SWITCH_PROTOCOL, - SEND_BODY, - SERVER, - SWITCHED_PROTOCOL, -) -from ._util import ( # Import the internal things we need - LocalProtocolError, - RemoteProtocolError, - Sentinel, -) -from ._writers import WRITERS, WritersType - -# Everything in __all__ gets re-exported as part of the h11 public API. -__all__ = ["Connection", "NEED_DATA", "PAUSED"] - - -class NEED_DATA(Sentinel, metaclass=Sentinel): - pass - - -class PAUSED(Sentinel, metaclass=Sentinel): - pass - - -# If we ever have this much buffered without it making a complete parseable -# event, we error out. The only time we really buffer is when reading the -# request/response line + headers together, so this is effectively the limit on -# the size of that. -# -# Some precedents for defaults: -# - node.js: 80 * 1024 -# - tomcat: 8 * 1024 -# - IIS: 16 * 1024 -# - Apache: <8 KiB per line> -DEFAULT_MAX_INCOMPLETE_EVENT_SIZE = 16 * 1024 - -# RFC 7230's rules for connection lifecycles: -# - If either side says they want to close the connection, then the connection -# must close.
-# - HTTP/1.1 defaults to keep-alive unless someone says Connection: close -# - HTTP/1.0 defaults to close unless both sides say Connection: keep-alive -# (and even this is a mess -- e.g. if you're implementing a proxy then -# sending Connection: keep-alive is forbidden). -# -# We simplify life by simply not supporting keep-alive with HTTP/1.0 peers. So -# our rule is: -# - If someone says Connection: close, we will close -# - If someone uses HTTP/1.0, we will close. -def _keep_alive(event: Union[Request, Response]) -> bool: - connection = get_comma_header(event.headers, b"connection") - if b"close" in connection: - return False - if getattr(event, "http_version", b"1.1") < b"1.1": - return False - return True - - -def _body_framing( - request_method: bytes, event: Union[Request, Response] -) -> Tuple[str, Union[Tuple[()], Tuple[int]]]: - # Called when we enter SEND_BODY to figure out framing information for - # this body. - # - # These are the only two events that can trigger a SEND_BODY state: - assert type(event) in (Request, Response) - # Returns one of: - # - # ("content-length", count) - # ("chunked", ()) - # ("http/1.0", ()) - # - # which are (lookup key, *args) for constructing body reader/writer - # objects. - # - # Reference: https://tools.ietf.org/html/rfc7230#section-3.3.3 - # - # Step 1: some responses always have an empty body, regardless of what the - # headers say. - if type(event) is Response: - if ( - event.status_code in (204, 304) - or request_method == b"HEAD" - or (request_method == b"CONNECT" and 200 <= event.status_code < 300) - ): - return ("content-length", (0,)) - # Section 3.3.3 also lists another case -- responses with status_code - # < 200. For us these are InformationalResponses, not Responses, so - # they can't get into this function in the first place. - assert event.status_code >= 200 - - # Step 2: check for Transfer-Encoding (T-E beats C-L): - transfer_encodings = get_comma_header(event.headers, b"transfer-encoding") - if transfer_encodings: - assert transfer_encodings == [b"chunked"] - return ("chunked", ()) - - # Step 3: check for Content-Length - content_lengths = get_comma_header(event.headers, b"content-length") - if content_lengths: - return ("content-length", (int(content_lengths[0]),)) - - # Step 4: no applicable headers; fallback/default depends on type - if type(event) is Request: - return ("content-length", (0,)) - else: - return ("http/1.0", ()) - - -################################################################ -# -# The main Connection class -# -################################################################ - - -class Connection: - """An object encapsulating the state of an HTTP connection. - - Args: - our_role: If you're implementing a client, pass :data:`h11.CLIENT`. If - you're implementing a server, pass :data:`h11.SERVER`. - - max_incomplete_event_size (int): - The maximum number of bytes we're willing to buffer of an - incomplete event. In practice this mostly sets a limit on the - maximum size of the request/response line + headers. If this is - exceeded, then :meth:`next_event` will raise - :exc:`RemoteProtocolError`. 
- - """ - - def __init__( - self, - our_role: Type[Sentinel], - max_incomplete_event_size: int = DEFAULT_MAX_INCOMPLETE_EVENT_SIZE, - ) -> None: - self._max_incomplete_event_size = max_incomplete_event_size - # State and role tracking - if our_role not in (CLIENT, SERVER): - raise ValueError("expected CLIENT or SERVER, not {!r}".format(our_role)) - self.our_role = our_role - self.their_role: Type[Sentinel] - if our_role is CLIENT: - self.their_role = SERVER - else: - self.their_role = CLIENT - self._cstate = ConnectionState() - - # Callables for converting data->events or vice-versa given the - # current state - self._writer = self._get_io_object(self.our_role, None, WRITERS) - self._reader = self._get_io_object(self.their_role, None, READERS) - - # Holds any unprocessed received data - self._receive_buffer = ReceiveBuffer() - # If this is true, then it indicates that the incoming connection was - # closed *after* the end of whatever's in self._receive_buffer: - self._receive_buffer_closed = False - - # Extra bits of state that don't fit into the state machine. - # - # These two are only used to interpret framing headers for figuring - # out how to read/write response bodies. their_http_version is also - # made available as a convenient public API. - self.their_http_version: Optional[bytes] = None - self._request_method: Optional[bytes] = None - # This is pure flow-control and doesn't at all affect the set of legal - # transitions, so no need to bother ConnectionState with it: - self.client_is_waiting_for_100_continue = False - - @property - def states(self) -> Dict[Type[Sentinel], Type[Sentinel]]: - """A dictionary like:: - - {CLIENT: <client state>, SERVER: <server state>} - - See :ref:`state-machine` for details. - - """ - return dict(self._cstate.states) - - @property - def our_state(self) -> Type[Sentinel]: - """The current state of whichever role we are playing. See - :ref:`state-machine` for details. - """ - return self._cstate.states[self.our_role] - - @property - def their_state(self) -> Type[Sentinel]: - """The current state of whichever role we are NOT playing. See - :ref:`state-machine` for details. - """ - return self._cstate.states[self.their_role] - - @property - def they_are_waiting_for_100_continue(self) -> bool: - return self.their_role is CLIENT and self.client_is_waiting_for_100_continue - - def start_next_cycle(self) -> None: - """Attempt to reset our connection state for a new request/response - cycle. - - If both client and server are in :data:`DONE` state, then resets them - both to :data:`IDLE` state in preparation for a new request/response - cycle on this same connection. Otherwise, raises a - :exc:`LocalProtocolError`. - - See :ref:`keepalive-and-pipelining`.
- - """ - old_states = dict(self._cstate.states) - self._cstate.start_next_cycle() - self._request_method = None - # self.their_http_version gets left alone, since it presumably lasts - # beyond a single request/response cycle - assert not self.client_is_waiting_for_100_continue - self._respond_to_state_changes(old_states) - - def _process_error(self, role: Type[Sentinel]) -> None: - old_states = dict(self._cstate.states) - self._cstate.process_error(role) - self._respond_to_state_changes(old_states) - - def _server_switch_event(self, event: Event) -> Optional[Type[Sentinel]]: - if type(event) is InformationalResponse and event.status_code == 101: - return _SWITCH_UPGRADE - if type(event) is Response: - if ( - _SWITCH_CONNECT in self._cstate.pending_switch_proposals - and 200 <= event.status_code < 300 - ): - return _SWITCH_CONNECT - return None - - # All events go through here - def _process_event(self, role: Type[Sentinel], event: Event) -> None: - # First, pass the event through the state machine to make sure it - # succeeds. - old_states = dict(self._cstate.states) - if role is CLIENT and type(event) is Request: - if event.method == b"CONNECT": - self._cstate.process_client_switch_proposal(_SWITCH_CONNECT) - if get_comma_header(event.headers, b"upgrade"): - self._cstate.process_client_switch_proposal(_SWITCH_UPGRADE) - server_switch_event = None - if role is SERVER: - server_switch_event = self._server_switch_event(event) - self._cstate.process_event(role, type(event), server_switch_event) - - # Then perform the updates triggered by it. - - if type(event) is Request: - self._request_method = event.method - - if role is self.their_role and type(event) in ( - Request, - Response, - InformationalResponse, - ): - event = cast(Union[Request, Response, InformationalResponse], event) - self.their_http_version = event.http_version - - # Keep alive handling - # - # RFC 7230 doesn't really say what one should do if Connection: close - # shows up on a 1xx InformationalResponse. I think the idea is that - # this is not supposed to happen. In any case, if it does happen, we - # ignore it. - if type(event) in (Request, Response) and not _keep_alive( - cast(Union[Request, Response], event) - ): - self._cstate.process_keep_alive_disabled() - - # 100-continue - if type(event) is Request and has_expect_100_continue(event): - self.client_is_waiting_for_100_continue = True - if type(event) in (InformationalResponse, Response): - self.client_is_waiting_for_100_continue = False - if role is CLIENT and type(event) in (Data, EndOfMessage): - self.client_is_waiting_for_100_continue = False - - self._respond_to_state_changes(old_states, event) - - def _get_io_object( - self, - role: Type[Sentinel], - event: Optional[Event], - io_dict: Union[ReadersType, WritersType], - ) -> Optional[Callable[..., Any]]: - # event may be None; it's only used when entering SEND_BODY - state = self._cstate.states[role] - if state is SEND_BODY: - # Special case: the io_dict has a dict of reader/writer factories - # that depend on the request/response framing. - framing_type, args = _body_framing( - cast(bytes, self._request_method), cast(Union[Request, Response], event) - ) - return io_dict[SEND_BODY][framing_type](*args) # type: ignore[index] - else: - # General case: the io_dict just has the appropriate reader/writer - # for this state - return io_dict.get((role, state)) # type: ignore[return-value] - - # This must be called after any action that might have caused - # self._cstate.states to change. 
- def _respond_to_state_changes( - self, - old_states: Dict[Type[Sentinel], Type[Sentinel]], - event: Optional[Event] = None, - ) -> None: - # Update reader/writer - if self.our_state != old_states[self.our_role]: - self._writer = self._get_io_object(self.our_role, event, WRITERS) - if self.their_state != old_states[self.their_role]: - self._reader = self._get_io_object(self.their_role, event, READERS) - - @property - def trailing_data(self) -> Tuple[bytes, bool]: - """Data that has been received, but not yet processed, represented as - a tuple with two elements, where the first is a byte-string containing - the unprocessed data itself, and the second is a bool that is True if - the receive connection was closed. - - See :ref:`switching-protocols` for discussion of why you'd want this. - """ - return (bytes(self._receive_buffer), self._receive_buffer_closed) - - def receive_data(self, data: bytes) -> None: - """Add data to our internal receive buffer. - - This does not actually do any processing on the data, just stores - it. To trigger processing, you have to call :meth:`next_event`. - - Args: - data (:term:`bytes-like object`): - The new data that was just received. - - Special case: If *data* is an empty byte-string like ``b""``, - then this indicates that the remote side has closed the - connection (end of file). Normally this is convenient, because - standard Python APIs like :meth:`file.read` or - :meth:`socket.recv` use ``b""`` to indicate end-of-file, while - other failures to read are indicated using other mechanisms - like raising :exc:`TimeoutError`. When using such an API you - can just blindly pass through whatever you get from ``read`` - to :meth:`receive_data`, and everything will work. - - But, if you have an API where reading an empty string is a - valid non-EOF condition, then you need to be aware of this and - make sure to check for such strings and avoid passing them to - :meth:`receive_data`. - - Returns: - Nothing, but after calling this you should call :meth:`next_event` - to parse the newly received data. - - Raises: - RuntimeError: - Raised if you pass an empty *data*, indicating EOF, and then - pass a non-empty *data*, indicating more data that somehow - arrived after the EOF. - - (Calling ``receive_data(b"")`` multiple times is fine, - and equivalent to calling it once.) - - """ - if data: - if self._receive_buffer_closed: - raise RuntimeError("received close, then received more data?") - self._receive_buffer += data - else: - self._receive_buffer_closed = True - - def _extract_next_receive_event( - self, - ) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: - state = self.their_state - # We don't pause immediately when they enter DONE, because even in - # DONE state we can still process a ConnectionClosed() event. But - # if we have data in our buffer, then we definitely aren't getting - # a ConnectionClosed() immediately and we need to pause. - if state is DONE and self._receive_buffer: - return PAUSED - if state is MIGHT_SWITCH_PROTOCOL or state is SWITCHED_PROTOCOL: - return PAUSED - assert self._reader is not None - event = self._reader(self._receive_buffer) - if event is None: - if not self._receive_buffer and self._receive_buffer_closed: - # In some unusual cases (basically just HTTP/1.0 bodies), EOF - # triggers an actual protocol event; in that case, we want to - # return that event, and then the state will change and we'll - # get called again to generate the actual ConnectionClosed(). 
- if hasattr(self._reader, "read_eof"): - event = self._reader.read_eof() # type: ignore[attr-defined] - else: - event = ConnectionClosed() - if event is None: - event = NEED_DATA - return event # type: ignore[no-any-return] - - def next_event(self) -> Union[Event, Type[NEED_DATA], Type[PAUSED]]: - """Parse the next event out of our receive buffer, update our internal - state, and return it. - - This is a mutating operation -- think of it like calling :func:`next` - on an iterator. - - Returns: - : One of three things: - - 1) An event object -- see :ref:`events`. - - 2) The special constant :data:`NEED_DATA`, which indicates that - you need to read more data from your socket and pass it to - :meth:`receive_data` before this method will be able to return - any more events. - - 3) The special constant :data:`PAUSED`, which indicates that we - are not in a state where we can process incoming data (usually - because the peer has finished their part of the current - request/response cycle, and you have not yet called - :meth:`start_next_cycle`). See :ref:`flow-control` for details. - - Raises: - RemoteProtocolError: - The peer has misbehaved. You should close the connection - (possibly after sending some kind of 4xx response). - - Once this method returns :class:`ConnectionClosed` once, then all - subsequent calls will also return :class:`ConnectionClosed`. - - If this method raises any exception besides :exc:`RemoteProtocolError` - then that's a bug -- if it happens please file a bug report! - - If this method raises any exception then it also sets - :attr:`Connection.their_state` to :data:`ERROR` -- see - :ref:`error-handling` for discussion. - - """ - - if self.their_state is ERROR: - raise RemoteProtocolError("Can't receive data when peer state is ERROR") - try: - event = self._extract_next_receive_event() - if event not in [NEED_DATA, PAUSED]: - self._process_event(self.their_role, cast(Event, event)) - if event is NEED_DATA: - if len(self._receive_buffer) > self._max_incomplete_event_size: - # 431 is "Request header fields too large" which is pretty - # much the only situation where we can get here - raise RemoteProtocolError( - "Receive buffer too long", error_status_hint=431 - ) - if self._receive_buffer_closed: - # We're still trying to complete some event, but that's - # never going to happen because no more data is coming - raise RemoteProtocolError("peer unexpectedly closed connection") - return event - except BaseException as exc: - self._process_error(self.their_role) - if isinstance(exc, LocalProtocolError): - exc._reraise_as_remote_protocol_error() - else: - raise - - def send(self, event: Event) -> Optional[bytes]: - """Convert a high-level event into bytes that can be sent to the peer, - while updating our internal state machine. - - Args: - event: The :ref:`event ` to send. - - Returns: - If ``type(event) is ConnectionClosed``, then returns - ``None``. Otherwise, returns a :term:`bytes-like object`. - - Raises: - LocalProtocolError: - Sending this event at this time would violate our - understanding of the HTTP/1.1 protocol. - - If this method raises any exception then it also sets - :attr:`Connection.our_state` to :data:`ERROR` -- see - :ref:`error-handling` for discussion. 
- - """ - data_list = self.send_with_data_passthrough(event) - if data_list is None: - return None - else: - return b"".join(data_list) - - def send_with_data_passthrough(self, event: Event) -> Optional[List[bytes]]: - """Identical to :meth:`send`, except that in situations where - :meth:`send` returns a single :term:`bytes-like object`, this instead - returns a list of them -- and when sending a :class:`Data` event, this - list is guaranteed to contain the exact object you passed in as - :attr:`Data.data`. See :ref:`sendfile` for discussion. - - """ - if self.our_state is ERROR: - raise LocalProtocolError("Can't send data when our state is ERROR") - try: - if type(event) is Response: - event = self._clean_up_response_headers_for_sending(event) - # We want to call _process_event before calling the writer, - # because if someone tries to do something invalid then this will - # give a sensible error message, while our writers all just assume - # they will only receive valid events. But, _process_event might - # change self._writer. So we have to do a little dance: - writer = self._writer - self._process_event(self.our_role, event) - if type(event) is ConnectionClosed: - return None - else: - # In any situation where writer is None, process_event should - # have raised ProtocolError - assert writer is not None - data_list: List[bytes] = [] - writer(event, data_list.append) - return data_list - except: - self._process_error(self.our_role) - raise - - def send_failed(self) -> None: - """Notify the state machine that we failed to send the data it gave - us. - - This causes :attr:`Connection.our_state` to immediately become - :data:`ERROR` -- see :ref:`error-handling` for discussion. - - """ - self._process_error(self.our_role) - - # When sending a Response, we take responsibility for a few things: - # - # - Sometimes you MUST set Connection: close. We take care of those - # times. (You can also set it yourself if you want, and if you do then - # we'll respect that and close the connection at the right time. But you - # don't have to worry about that unless you want to.) - # - # - The user has to set Content-Length if they want it. Otherwise, for - # responses that have bodies (e.g. not HEAD), then we will automatically - # select the right mechanism for streaming a body of unknown length, - # which depends on the peer's HTTP version. - # - # This function's *only* responsibility is making sure headers are set up - # right -- everything downstream just looks at the headers. There are no - # side channels. - def _clean_up_response_headers_for_sending(self, response: Response) -> Response: - assert type(response) is Response - - headers = response.headers - need_close = False - - # HEAD requests need some special handling: they always act like they - # have Content-Length: 0, and that's how _body_framing treats - # them. But their headers are supposed to match what we would send if - # the request was a GET. (Technically there is one deviation allowed: - # we're allowed to leave out the framing headers -- see - # https://tools.ietf.org/html/rfc7231#section-4.3.2 . But it's just as - # easy to get them right.) - method_for_choosing_headers = cast(bytes, self._request_method) - if method_for_choosing_headers == b"HEAD": - method_for_choosing_headers = b"GET" - framing_type, _ = _body_framing(method_for_choosing_headers, response) - if framing_type in ("chunked", "http/1.0"): - # This response has a body of unknown length.
- # If our peer is HTTP/1.1, we use Transfer-Encoding: chunked - # If our peer is HTTP/1.0, we use no framing headers, and close the - # connection afterwards. - # - # Make sure to clear Content-Length (in principle user could have - # set both and then we ignored Content-Length b/c - # Transfer-Encoding overwrote it -- this would be naughty of them, - # but the HTTP spec says that if our peer does this then we have - # to fix it instead of erroring out, so we'll accord the user the - # same respect). - headers = set_comma_header(headers, b"content-length", []) - if self.their_http_version is None or self.their_http_version < b"1.1": - # Either we never got a valid request and are sending back an - # error (their_http_version is None), so we assume the worst; - # or else we did get a valid HTTP/1.0 request, so we know that - # they don't understand chunked encoding. - headers = set_comma_header(headers, b"transfer-encoding", []) - # This is actually redundant ATM, since currently we - # unconditionally disable keep-alive when talking to HTTP/1.0 - # peers. But let's be defensive just in case we add - # Connection: keep-alive support later: - if self._request_method != b"HEAD": - need_close = True - else: - headers = set_comma_header(headers, b"transfer-encoding", [b"chunked"]) - - if not self._cstate.keep_alive or need_close: - # Make sure Connection: close is set - connection = set(get_comma_header(headers, b"connection")) - connection.discard(b"keep-alive") - connection.add(b"close") - headers = set_comma_header(headers, b"connection", sorted(connection)) - - return Response( - headers=headers, - status_code=response.status_code, - http_version=response.http_version, - reason=response.reason, - ) diff --git a/myenv/Lib/site-packages/h11/_events.py b/myenv/Lib/site-packages/h11/_events.py deleted file mode 100644 index 075bf8a..0000000 --- a/myenv/Lib/site-packages/h11/_events.py +++ /dev/null @@ -1,369 +0,0 @@ -# High level events that make up HTTP/1.1 conversations. Loosely inspired by -# the corresponding events in hyper-h2: -# -# http://python-hyper.org/h2/en/stable/api.html#events -# -# Don't subclass these. Stuff will break. - -import re -from abc import ABC -from dataclasses import dataclass, field -from typing import Any, cast, Dict, List, Tuple, Union - -from ._abnf import method, request_target -from ._headers import Headers, normalize_and_validate -from ._util import bytesify, LocalProtocolError, validate - -# Everything in __all__ gets re-exported as part of the h11 public API. -__all__ = [ - "Event", - "Request", - "InformationalResponse", - "Response", - "Data", - "EndOfMessage", - "ConnectionClosed", -] - -method_re = re.compile(method.encode("ascii")) -request_target_re = re.compile(request_target.encode("ascii")) - - -class Event(ABC): - """ - Base class for h11 events. - """ - - __slots__ = () - - -@dataclass(init=False, frozen=True) -class Request(Event): - """The beginning of an HTTP request. - - Fields: - - .. attribute:: method - - An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte - string. :term:`Bytes-like objects ` and native - strings containing only ascii characters will be automatically - converted to byte strings. - - .. attribute:: target - - The target of an HTTP request, e.g. ``b"/index.html"``, or one of the - more exotic formats described in `RFC 7230, section 5.3 - `_. Always a byte - string. :term:`Bytes-like objects ` and native - strings containing only ascii characters will be automatically - converted to byte strings. - - ..
attribute:: headers
-
- Request headers, represented as a list of (name, value) pairs. See
- :ref:`the header normalization rules` for details.
-
- .. attribute:: http_version
-
- The HTTP protocol version, represented as a byte string like
- ``b"1.1"``. See :ref:`the HTTP version normalization rules`
- for details.
-
- """
-
- __slots__ = ("method", "headers", "target", "http_version")
-
- method: bytes
- headers: Headers
- target: bytes
- http_version: bytes
-
- def __init__(
- self,
- *,
- method: Union[bytes, str],
- headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
- target: Union[bytes, str],
- http_version: Union[bytes, str] = b"1.1",
- _parsed: bool = False,
- ) -> None:
- super().__init__()
- if isinstance(headers, Headers):
- object.__setattr__(self, "headers", headers)
- else:
- object.__setattr__(
- self, "headers", normalize_and_validate(headers, _parsed=_parsed)
- )
- if not _parsed:
- object.__setattr__(self, "method", bytesify(method))
- object.__setattr__(self, "target", bytesify(target))
- object.__setattr__(self, "http_version", bytesify(http_version))
- else:
- object.__setattr__(self, "method", method)
- object.__setattr__(self, "target", target)
- object.__setattr__(self, "http_version", http_version)
-
- # "A server MUST respond with a 400 (Bad Request) status code to any
- # HTTP/1.1 request message that lacks a Host header field and to any
- # request message that contains more than one Host header field or a
- # Host header field with an invalid field-value."
- # -- https://tools.ietf.org/html/rfc7230#section-5.4
- host_count = 0
- for name, value in self.headers:
- if name == b"host":
- host_count += 1
- if self.http_version == b"1.1" and host_count == 0:
- raise LocalProtocolError("Missing mandatory Host: header")
- if host_count > 1:
- raise LocalProtocolError("Found multiple Host: headers")
-
- validate(method_re, self.method, "Illegal method characters")
- validate(request_target_re, self.target, "Illegal target characters")
-
- # This is an unhashable type.
- __hash__ = None # type: ignore
-
-
-@dataclass(init=False, frozen=True)
-class _ResponseBase(Event):
- __slots__ = ("headers", "http_version", "reason", "status_code")
-
- headers: Headers
- http_version: bytes
- reason: bytes
- status_code: int
-
- def __init__(
- self,
- *,
- headers: Union[Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]]],
- status_code: int,
- http_version: Union[bytes, str] = b"1.1",
- reason: Union[bytes, str] = b"",
- _parsed: bool = False,
- ) -> None:
- super().__init__()
- if isinstance(headers, Headers):
- object.__setattr__(self, "headers", headers)
- else:
- object.__setattr__(
- self, "headers", normalize_and_validate(headers, _parsed=_parsed)
- )
- if not _parsed:
- object.__setattr__(self, "reason", bytesify(reason))
- object.__setattr__(self, "http_version", bytesify(http_version))
- if not isinstance(status_code, int):
- raise LocalProtocolError("status code must be integer")
- # Because IntEnum objects are instances of int, but aren't
- # duck-compatible (sigh), see gh-72.
- object.__setattr__(self, "status_code", int(status_code))
- else:
- object.__setattr__(self, "reason", reason)
- object.__setattr__(self, "http_version", http_version)
- object.__setattr__(self, "status_code", status_code)
-
- self.__post_init__()
-
- def __post_init__(self) -> None:
- pass
-
- # This is an unhashable type.
- __hash__ = None # type: ignore
-
-
-@dataclass(init=False, frozen=True)
-class InformationalResponse(_ResponseBase):
- """An HTTP informational response.
-
- Fields:
-
- .. attribute:: status_code
-
- The status code of this response, as an integer. For an
- :class:`InformationalResponse`, this is always in the range [100,
- 200).
-
- .. attribute:: headers
-
- Response headers, represented as a list of (name, value) pairs. See
- :ref:`the header normalization rules` for
- details.
-
- .. attribute:: http_version
-
- The HTTP protocol version, represented as a byte string like
- ``b"1.1"``. See :ref:`the HTTP version normalization rules`
- for details.
-
- .. attribute:: reason
-
- The reason phrase of this response, as a byte string. For example:
- ``b"OK"``, or ``b"Not Found"``.
-
- """
-
- def __post_init__(self) -> None:
- if not (100 <= self.status_code < 200):
- raise LocalProtocolError(
- "InformationalResponse status_code should be in range "
- "[100, 200), not {}".format(self.status_code)
- )
-
- # This is an unhashable type.
- __hash__ = None # type: ignore
-
-
-@dataclass(init=False, frozen=True)
-class Response(_ResponseBase):
- """The beginning of an HTTP response.
-
- Fields:
-
- .. attribute:: status_code
-
- The status code of this response, as an integer. For a
- :class:`Response`, this is always in the range [200,
- 1000).
-
- .. attribute:: headers
-
- Response headers, represented as a list of (name, value) pairs. See
- :ref:`the header normalization rules` for details.
-
- .. attribute:: http_version
-
- The HTTP protocol version, represented as a byte string like
- ``b"1.1"``. See :ref:`the HTTP version normalization rules`
- for details.
-
- .. attribute:: reason
-
- The reason phrase of this response, as a byte string. For example:
- ``b"OK"``, or ``b"Not Found"``.
-
- """
-
- def __post_init__(self) -> None:
- if not (200 <= self.status_code < 1000):
- raise LocalProtocolError(
- "Response status_code should be in range [200, 1000), not {}".format(
- self.status_code
- )
- )
-
- # This is an unhashable type.
- __hash__ = None # type: ignore
-
-
-@dataclass(init=False, frozen=True)
-class Data(Event):
- """Part of an HTTP message body.
-
- Fields:
-
- .. attribute:: data
-
- A :term:`bytes-like object` containing part of a message body. Or, if
- using the ``combine=False`` argument to :meth:`Connection.send`, then
- any object that your socket writing code knows what to do with, and for
- which calling :func:`len` returns the number of bytes that will be
- written -- see :ref:`sendfile` for details.
-
- .. attribute:: chunk_start
-
- A marker that indicates whether this data object is from the start of a
- chunked transfer encoding chunk. This field is ignored when a Data
- event is provided to :meth:`Connection.send`: it is only valid on
- events emitted from :meth:`Connection.next_event`. You probably
- shouldn't use this attribute at all; see
- :ref:`chunk-delimiters-are-bad` for details.
-
- .. attribute:: chunk_end
-
- A marker that indicates whether this data object is the last for a
- given chunked transfer encoding chunk. This field is ignored when
- a Data event is provided to :meth:`Connection.send`: it is only valid
- on events emitted from :meth:`Connection.next_event`. You probably
- shouldn't use this attribute at all; see
- :ref:`chunk-delimiters-are-bad` for details.
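-
- For example, a single 5-byte chunk arriving in two reads might be
- surfaced as (an illustration, not captured output):
-
- Data(data=b"hel", chunk_start=True, chunk_end=False)
- Data(data=b"lo", chunk_start=False, chunk_end=True)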
- - """ - - __slots__ = ("data", "chunk_start", "chunk_end") - - data: bytes - chunk_start: bool - chunk_end: bool - - def __init__( - self, data: bytes, chunk_start: bool = False, chunk_end: bool = False - ) -> None: - object.__setattr__(self, "data", data) - object.__setattr__(self, "chunk_start", chunk_start) - object.__setattr__(self, "chunk_end", chunk_end) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that -# are forbidden to be sent in a trailer, since processing them as if they were -# present in the header section might bypass external security filters." -# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part -# Unfortunately, the list of forbidden fields is long and vague :-/ -@dataclass(init=False, frozen=True) -class EndOfMessage(Event): - """The end of an HTTP message. - - Fields: - - .. attribute:: headers - - Default value: ``[]`` - - Any trailing headers attached to this message, represented as a list of - (name, value) pairs. See :ref:`the header normalization rules - ` for details. - - Must be empty unless ``Transfer-Encoding: chunked`` is in use. - - """ - - __slots__ = ("headers",) - - headers: Headers - - def __init__( - self, - *, - headers: Union[ - Headers, List[Tuple[bytes, bytes]], List[Tuple[str, str]], None - ] = None, - _parsed: bool = False, - ) -> None: - super().__init__() - if headers is None: - headers = Headers([]) - elif not isinstance(headers, Headers): - headers = normalize_and_validate(headers, _parsed=_parsed) - - object.__setattr__(self, "headers", headers) - - # This is an unhashable type. - __hash__ = None # type: ignore - - -@dataclass(frozen=True) -class ConnectionClosed(Event): - """This event indicates that the sender has closed their outgoing - connection. - - Note that this does not necessarily mean that they can't *receive* further - data, because TCP connections are composed to two one-way channels which - can be closed independently. See :ref:`closing` for details. - - No fields. - """ - - pass diff --git a/myenv/Lib/site-packages/h11/_headers.py b/myenv/Lib/site-packages/h11/_headers.py deleted file mode 100644 index b97d020..0000000 --- a/myenv/Lib/site-packages/h11/_headers.py +++ /dev/null @@ -1,278 +0,0 @@ -import re -from typing import AnyStr, cast, List, overload, Sequence, Tuple, TYPE_CHECKING, Union - -from ._abnf import field_name, field_value -from ._util import bytesify, LocalProtocolError, validate - -if TYPE_CHECKING: - from ._events import Request - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - - -# Facts -# ----- -# -# Headers are: -# keys: case-insensitive ascii -# values: mixture of ascii and raw bytes -# -# "Historically, HTTP has allowed field content with text in the ISO-8859-1 -# charset [ISO-8859-1], supporting other charsets only through use of -# [RFC2047] encoding. In practice, most HTTP header field values use only a -# subset of the US-ASCII charset [USASCII]. Newly defined header fields SHOULD -# limit their field values to US-ASCII octets. A recipient SHOULD treat other -# octets in field content (obs-text) as opaque data." 
-# And it deprecates all non-ascii values
-#
-# Leading/trailing whitespace in header names is forbidden
-#
-# Values get leading/trailing whitespace stripped
-#
-# Content-Disposition actually needs to contain unicode semantically; to
-# accomplish this it has a terrifically weird way of encoding the filename
-# itself as ascii (and even this still has lots of cross-browser
-# incompatibilities)
-#
-# Order is important:
-# "a proxy MUST NOT change the order of these field values when forwarding a
-# message"
-# (and there are several headers where the order indicates a preference)
-#
-# Multiple occurrences of the same header:
-# "A sender MUST NOT generate multiple header fields with the same field name
-# in a message unless either the entire field value for that header field is
-# defined as a comma-separated list [or the header is Set-Cookie which gets a
-# special exception]" - RFC 7230. (cookies are in RFC 6265)
-#
-# So every header aside from Set-Cookie can be merged by b", ".join if it
-# occurs repeatedly. But, of course, they can't necessarily be split by
-# .split(b","), because quoting.
-#
-# Given all this mess (case insensitive, duplicates allowed, order is
-# important, ...), there doesn't appear to be any standard way to handle
-# headers in Python -- they're almost like dicts, but... actually just
-# aren't. For now we punt and just use a super simple representation: headers
-# are a list of pairs
-#
-# [(name1, value1), (name2, value2), ...]
-#
-# where all entries are bytestrings, names are lowercase and have no
-# leading/trailing whitespace, and values are bytestrings with no
-# leading/trailing whitespace. Searching and updating are done via naive O(n)
-# methods.
-#
-# Maybe a dict-of-lists would be better?
-
-_content_length_re = re.compile(rb"[0-9]+")
-_field_name_re = re.compile(field_name.encode("ascii"))
-_field_value_re = re.compile(field_value.encode("ascii"))
-
-
-class Headers(Sequence[Tuple[bytes, bytes]]):
- """
- A list-like interface that allows iterating over headers as byte-pairs
- of (lowercased-name, value).
-
- Internally we actually store the representation as three-tuples,
- including both the raw original casing, in order to preserve casing
- over-the-wire, and the lowercased name, for case-insensitive comparisons.
- - r = Request(
- method="GET",
- target="/",
- headers=[("Host", "example.org"), ("Connection", "keep-alive")],
- http_version="1.1",
- )
- assert r.headers == [
- (b"host", b"example.org"),
- (b"connection", b"keep-alive")
- ]
- assert r.headers.raw_items() == [
- (b"Host", b"example.org"),
- (b"Connection", b"keep-alive")
- ]
- """
-
- __slots__ = "_full_items"
-
- def __init__(self, full_items: List[Tuple[bytes, bytes, bytes]]) -> None:
- self._full_items = full_items
-
- def __bool__(self) -> bool:
- return bool(self._full_items)
-
- def __eq__(self, other: object) -> bool:
- return list(self) == list(other) # type: ignore
-
- def __len__(self) -> int:
- return len(self._full_items)
-
- def __repr__(self) -> str:
- return "<Headers(%s)>" % repr(list(self))
-
- def __getitem__(self, idx: int) -> Tuple[bytes, bytes]: # type: ignore[override]
- _, name, value = self._full_items[idx]
- return (name, value)
-
- def raw_items(self) -> List[Tuple[bytes, bytes]]:
- return [(raw_name, value) for raw_name, _, value in self._full_items]
-
-
-HeaderTypes = Union[
- List[Tuple[bytes, bytes]],
- List[Tuple[bytes, str]],
- List[Tuple[str, bytes]],
- List[Tuple[str, str]],
-]
-
-
-@overload
-def normalize_and_validate(headers: Headers, _parsed: Literal[True]) -> Headers:
- ...
-
-
-@overload
-def normalize_and_validate(headers: HeaderTypes, _parsed: Literal[False]) -> Headers:
- ...
-
-
-@overload
-def normalize_and_validate(
- headers: Union[Headers, HeaderTypes], _parsed: bool = False
-) -> Headers:
- ...
-
-
-def normalize_and_validate(
- headers: Union[Headers, HeaderTypes], _parsed: bool = False
-) -> Headers:
- new_headers = []
- seen_content_length = None
- saw_transfer_encoding = False
- for name, value in headers:
- # For headers coming out of the parser, we can safely skip some steps,
- # because it always returns bytes and has already run these regexes
- # over the data:
- if not _parsed:
- name = bytesify(name)
- value = bytesify(value)
- validate(_field_name_re, name, "Illegal header name {!r}", name)
- validate(_field_value_re, value, "Illegal header value {!r}", value)
- assert isinstance(name, bytes)
- assert isinstance(value, bytes)
-
- raw_name = name
- name = name.lower()
- if name == b"content-length":
- lengths = {length.strip() for length in value.split(b",")}
- if len(lengths) != 1:
- raise LocalProtocolError("conflicting Content-Length headers")
- value = lengths.pop()
- validate(_content_length_re, value, "bad Content-Length")
- if seen_content_length is None:
- seen_content_length = value
- new_headers.append((raw_name, name, value))
- elif seen_content_length != value:
- raise LocalProtocolError("conflicting Content-Length headers")
- elif name == b"transfer-encoding":
- # "A server that receives a request message with a transfer coding
- # it does not understand SHOULD respond with 501 (Not
- # Implemented)."
- # https://tools.ietf.org/html/rfc7230#section-3.3.1
- if saw_transfer_encoding:
- raise LocalProtocolError(
- "multiple Transfer-Encoding headers", error_status_hint=501
- )
- # "All transfer-coding names are case-insensitive"
- # -- https://tools.ietf.org/html/rfc7230#section-4
- value = value.lower()
- if value != b"chunked":
- raise LocalProtocolError(
- "Only Transfer-Encoding: chunked is supported",
- error_status_hint=501,
- )
- saw_transfer_encoding = True
- new_headers.append((raw_name, name, value))
- else:
- new_headers.append((raw_name, name, value))
- return Headers(new_headers)
-
-
-def get_comma_header(headers: Headers, name: bytes) -> List[bytes]:
- # Should only be used for headers whose value is a list of
- # comma-separated, case-insensitive values.
- #
- # The header name `name` is expected to be lower-case bytes.
- #
- # Connection: meets these criteria (including case insensitivity).
- #
- # Content-Length: technically is just a single value (1*DIGIT), but the
- # standard makes reference to implementations that do multiple values, and
- # using this doesn't hurt. Ditto, case insensitivity doesn't matter either
- # way.
- #
- # Transfer-Encoding: is more complex (allows for quoted strings), so
- # splitting on , is actually wrong. For example, this is legal:
- #
- # Transfer-Encoding: foo; options="1,2", chunked
- #
- # and should be parsed as
- #
- # foo; options="1,2"
- # chunked
- #
- # but this naive function will parse it as
- #
- # foo; options="1
- # 2"
- # chunked
- #
- # However, this is okay because the only thing we are going to do with
- # any Transfer-Encoding is reject ones that aren't just "chunked", so
- # both of these will be treated the same anyway.
- #
- # Expect: the only legal value is the literal string
- # "100-continue". Splitting on commas is harmless. Case insensitive.
- #
- out: List[bytes] = []
- for _, found_name, found_raw_value in headers._full_items:
- if found_name == name:
- found_raw_value = found_raw_value.lower()
- for found_split_value in found_raw_value.split(b","):
- found_split_value = found_split_value.strip()
- if found_split_value:
- out.append(found_split_value)
- return out
-
-
-def set_comma_header(headers: Headers, name: bytes, new_values: List[bytes]) -> Headers:
- # The header name `name` is expected to be lower-case bytes.
- #
- # Note that when we store the header we use title casing for the header
- # names, in order to match the conventional HTTP header style.
- #
- # Simply calling `.title()` is a blunt approach, but it's correct
- # here given the cases where we're using `set_comma_header`...
- #
- # Connection, Content-Length, Transfer-Encoding.
- new_headers: List[Tuple[bytes, bytes]] = []
- for found_raw_name, found_name, found_raw_value in headers._full_items:
- if found_name != name:
- new_headers.append((found_raw_name, found_raw_value))
- for new_value in new_values:
- new_headers.append((name.title(), new_value))
- return normalize_and_validate(new_headers)
-
-
-def has_expect_100_continue(request: "Request") -> bool:
- # https://tools.ietf.org/html/rfc7231#section-5.1.1
- # "A server that receives a 100-continue expectation in an HTTP/1.0 request
- # MUST ignore that expectation."
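- # For example (a sketch; the header values are illustrative):
- #
- # has_expect_100_continue(Request(
- # method="POST", target="/upload",
- # headers=[("Host", "example.com"), ("Expect", "100-continue")],
- # )) # -> True; the same request with http_version="1.0" -> False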
- if request.http_version < b"1.1": - return False - expect = get_comma_header(request.headers, b"expect") - return b"100-continue" in expect diff --git a/myenv/Lib/site-packages/h11/_readers.py b/myenv/Lib/site-packages/h11/_readers.py deleted file mode 100644 index 08a9574..0000000 --- a/myenv/Lib/site-packages/h11/_readers.py +++ /dev/null @@ -1,247 +0,0 @@ -# Code to read HTTP data -# -# Strategy: each reader is a callable which takes a ReceiveBuffer object, and -# either: -# 1) consumes some of it and returns an Event -# 2) raises a LocalProtocolError (for consistency -- e.g. we call validate() -# and it might raise a LocalProtocolError, so simpler just to always use -# this) -# 3) returns None, meaning "I need more data" -# -# If they have a .read_eof attribute, then this will be called if an EOF is -# received -- but this is optional. Either way, the actual ConnectionClosed -# event will be generated afterwards. -# -# READERS is a dict describing how to pick a reader. It maps states to either: -# - a reader -# - or, for body readers, a dict of per-framing reader factories - -import re -from typing import Any, Callable, Dict, Iterable, NoReturn, Optional, Tuple, Type, Union - -from ._abnf import chunk_header, header_field, request_line, status_line -from ._events import Data, EndOfMessage, InformationalResponse, Request, Response -from ._receivebuffer import ReceiveBuffer -from ._state import ( - CLIENT, - CLOSED, - DONE, - IDLE, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, -) -from ._util import LocalProtocolError, RemoteProtocolError, Sentinel, validate - -__all__ = ["READERS"] - -header_field_re = re.compile(header_field.encode("ascii")) -obs_fold_re = re.compile(rb"[ \t]+") - - -def _obsolete_line_fold(lines: Iterable[bytes]) -> Iterable[bytes]: - it = iter(lines) - last: Optional[bytes] = None - for line in it: - match = obs_fold_re.match(line) - if match: - if last is None: - raise LocalProtocolError("continuation line at start of headers") - if not isinstance(last, bytearray): - # Cast to a mutable type, avoiding copy on append to ensure O(n) time - last = bytearray(last) - last += b" " - last += line[match.end() :] - else: - if last is not None: - yield last - last = line - if last is not None: - yield last - - -def _decode_header_lines( - lines: Iterable[bytes], -) -> Iterable[Tuple[bytes, bytes]]: - for line in _obsolete_line_fold(lines): - matches = validate(header_field_re, line, "illegal header line: {!r}", line) - yield (matches["field_name"], matches["field_value"]) - - -request_line_re = re.compile(request_line.encode("ascii")) - - -def maybe_read_from_IDLE_client(buf: ReceiveBuffer) -> Optional[Request]: - lines = buf.maybe_extract_lines() - if lines is None: - if buf.is_next_line_obviously_invalid_request_line(): - raise LocalProtocolError("illegal request line") - return None - if not lines: - raise LocalProtocolError("no request line received") - matches = validate( - request_line_re, lines[0], "illegal request line: {!r}", lines[0] - ) - return Request( - headers=list(_decode_header_lines(lines[1:])), _parsed=True, **matches - ) - - -status_line_re = re.compile(status_line.encode("ascii")) - - -def maybe_read_from_SEND_RESPONSE_server( - buf: ReceiveBuffer, -) -> Union[InformationalResponse, Response, None]: - lines = buf.maybe_extract_lines() - if lines is None: - if buf.is_next_line_obviously_invalid_request_line(): - raise LocalProtocolError("illegal request line") - return None - if not lines: - raise LocalProtocolError("no response line received") - 
matches = validate(status_line_re, lines[0], "illegal status line: {!r}", lines[0]) - http_version = ( - b"1.1" if matches["http_version"] is None else matches["http_version"] - ) - reason = b"" if matches["reason"] is None else matches["reason"] - status_code = int(matches["status_code"]) - class_: Union[Type[InformationalResponse], Type[Response]] = ( - InformationalResponse if status_code < 200 else Response - ) - return class_( - headers=list(_decode_header_lines(lines[1:])), - _parsed=True, - status_code=status_code, - reason=reason, - http_version=http_version, - ) - - -class ContentLengthReader: - def __init__(self, length: int) -> None: - self._length = length - self._remaining = length - - def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: - if self._remaining == 0: - return EndOfMessage() - data = buf.maybe_extract_at_most(self._remaining) - if data is None: - return None - self._remaining -= len(data) - return Data(data=data) - - def read_eof(self) -> NoReturn: - raise RemoteProtocolError( - "peer closed connection without sending complete message body " - "(received {} bytes, expected {})".format( - self._length - self._remaining, self._length - ) - ) - - -chunk_header_re = re.compile(chunk_header.encode("ascii")) - - -class ChunkedReader: - def __init__(self) -> None: - self._bytes_in_chunk = 0 - # After reading a chunk, we have to throw away the trailing \r\n; if - # this is >0 then we discard that many bytes before resuming regular - # de-chunkification. - self._bytes_to_discard = 0 - self._reading_trailer = False - - def __call__(self, buf: ReceiveBuffer) -> Union[Data, EndOfMessage, None]: - if self._reading_trailer: - lines = buf.maybe_extract_lines() - if lines is None: - return None - return EndOfMessage(headers=list(_decode_header_lines(lines))) - if self._bytes_to_discard > 0: - data = buf.maybe_extract_at_most(self._bytes_to_discard) - if data is None: - return None - self._bytes_to_discard -= len(data) - if self._bytes_to_discard > 0: - return None - # else, fall through and read some more - assert self._bytes_to_discard == 0 - if self._bytes_in_chunk == 0: - # We need to refill our chunk count - chunk_header = buf.maybe_extract_next_line() - if chunk_header is None: - return None - matches = validate( - chunk_header_re, - chunk_header, - "illegal chunk header: {!r}", - chunk_header, - ) - # XX FIXME: we discard chunk extensions. Does anyone care? 
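- # For example (illustrative bytes): b"5;foo=bar\r\n" matches with
- # chunk_size == b"5" and the extension dropped, while b"0\r\n"
- # announces the final chunk and switches us to trailer reading.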
- self._bytes_in_chunk = int(matches["chunk_size"], base=16) - if self._bytes_in_chunk == 0: - self._reading_trailer = True - return self(buf) - chunk_start = True - else: - chunk_start = False - assert self._bytes_in_chunk > 0 - data = buf.maybe_extract_at_most(self._bytes_in_chunk) - if data is None: - return None - self._bytes_in_chunk -= len(data) - if self._bytes_in_chunk == 0: - self._bytes_to_discard = 2 - chunk_end = True - else: - chunk_end = False - return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end) - - def read_eof(self) -> NoReturn: - raise RemoteProtocolError( - "peer closed connection without sending complete message body " - "(incomplete chunked read)" - ) - - -class Http10Reader: - def __call__(self, buf: ReceiveBuffer) -> Optional[Data]: - data = buf.maybe_extract_at_most(999999999) - if data is None: - return None - return Data(data=data) - - def read_eof(self) -> EndOfMessage: - return EndOfMessage() - - -def expect_nothing(buf: ReceiveBuffer) -> None: - if buf: - raise LocalProtocolError("Got data when expecting EOF") - return None - - -ReadersType = Dict[ - Union[Type[Sentinel], Tuple[Type[Sentinel], Type[Sentinel]]], - Union[Callable[..., Any], Dict[str, Callable[..., Any]]], -] - -READERS: ReadersType = { - (CLIENT, IDLE): maybe_read_from_IDLE_client, - (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server, - (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server, - (CLIENT, DONE): expect_nothing, - (CLIENT, MUST_CLOSE): expect_nothing, - (CLIENT, CLOSED): expect_nothing, - (SERVER, DONE): expect_nothing, - (SERVER, MUST_CLOSE): expect_nothing, - (SERVER, CLOSED): expect_nothing, - SEND_BODY: { - "chunked": ChunkedReader, - "content-length": ContentLengthReader, - "http/1.0": Http10Reader, - }, -} diff --git a/myenv/Lib/site-packages/h11/_receivebuffer.py b/myenv/Lib/site-packages/h11/_receivebuffer.py deleted file mode 100644 index e5c4e08..0000000 --- a/myenv/Lib/site-packages/h11/_receivebuffer.py +++ /dev/null @@ -1,153 +0,0 @@ -import re -import sys -from typing import List, Optional, Union - -__all__ = ["ReceiveBuffer"] - - -# Operations we want to support: -# - find next \r\n or \r\n\r\n (\n or \n\n are also acceptable), -# or wait until there is one -# - read at-most-N bytes -# Goals: -# - on average, do this fast -# - worst case, do this in O(n) where n is the number of bytes processed -# Plan: -# - store bytearray, offset, how far we've searched for a separator token -# - use the how-far-we've-searched data to avoid rescanning -# - while doing a stream of uninterrupted processing, advance offset instead -# of constantly copying -# WARNING: -# - I haven't benchmarked or profiled any of this yet. -# -# Note that starting in Python 3.4, deleting the initial n bytes from a -# bytearray is amortized O(n), thanks to some excellent work by Antoine -# Martin: -# -# https://bugs.python.org/issue19087 -# -# This means that if we only supported 3.4+, we could get rid of the code here -# involving self._start and self.compress, because it's doing exactly the same -# thing that bytearray now does internally. -# -# BUT unfortunately, we still support 2.7, and reading short segments out of a -# long buffer MUST be O(bytes read) to avoid DoS issues, so we can't actually -# delete this code. 
Yet: -# -# https://pythonclock.org/ -# -# (Two things to double-check first though: make sure PyPy also has the -# optimization, and benchmark to make sure it's a win, since we do have a -# slightly clever thing where we delay calling compress() until we've -# processed a whole event, which could in theory be slightly more efficient -# than the internal bytearray support.) -blank_line_regex = re.compile(b"\n\r?\n", re.MULTILINE) - - -class ReceiveBuffer: - def __init__(self) -> None: - self._data = bytearray() - self._next_line_search = 0 - self._multiple_lines_search = 0 - - def __iadd__(self, byteslike: Union[bytes, bytearray]) -> "ReceiveBuffer": - self._data += byteslike - return self - - def __bool__(self) -> bool: - return bool(len(self)) - - def __len__(self) -> int: - return len(self._data) - - # for @property unprocessed_data - def __bytes__(self) -> bytes: - return bytes(self._data) - - def _extract(self, count: int) -> bytearray: - # extracting an initial slice of the data buffer and return it - out = self._data[:count] - del self._data[:count] - - self._next_line_search = 0 - self._multiple_lines_search = 0 - - return out - - def maybe_extract_at_most(self, count: int) -> Optional[bytearray]: - """ - Extract a fixed number of bytes from the buffer. - """ - out = self._data[:count] - if not out: - return None - - return self._extract(count) - - def maybe_extract_next_line(self) -> Optional[bytearray]: - """ - Extract the first line, if it is completed in the buffer. - """ - # Only search in buffer space that we've not already looked at. - search_start_index = max(0, self._next_line_search - 1) - partial_idx = self._data.find(b"\r\n", search_start_index) - - if partial_idx == -1: - self._next_line_search = len(self._data) - return None - - # + 2 is to compensate len(b"\r\n") - idx = partial_idx + 2 - - return self._extract(idx) - - def maybe_extract_lines(self) -> Optional[List[bytearray]]: - """ - Extract everything up to the first blank line, and return a list of lines. - """ - # Handle the case where we have an immediate empty line. - if self._data[:1] == b"\n": - self._extract(1) - return [] - - if self._data[:2] == b"\r\n": - self._extract(2) - return [] - - # Only search in buffer space that we've not already looked at. - match = blank_line_regex.search(self._data, self._multiple_lines_search) - if match is None: - self._multiple_lines_search = max(0, len(self._data) - 2) - return None - - # Truncate the buffer and return it. - idx = match.span(0)[-1] - out = self._extract(idx) - lines = out.split(b"\n") - - for line in lines: - if line.endswith(b"\r"): - del line[-1] - - assert lines[-2] == lines[-1] == b"" - - del lines[-2:] - - return lines - - # In theory we should wait until `\r\n` before starting to validate - # incoming data. However it's interesting to detect (very) invalid data - # early given they might not even contain `\r\n` at all (hence only - # timeout will get rid of them). - # This is not a 100% effective detection but more of a cheap sanity check - # allowing for early abort in some useful cases. - # This is especially interesting when peer is messing up with HTTPS and - # sent us a TLS stream where we were expecting plain HTTP given all - # versions of TLS so far start handshake with a 0x16 message type code. 
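- # For example (illustrative bytes): a TLS handshake record such as a
- # ClientHello starts with byte 0x16, and 0x16 < 0x21, so a sketch:
- #
- # buf = ReceiveBuffer()
- # buf += b"\x16\x03\x01\x02\x00"
- # assert buf.is_next_line_obviously_invalid_request_line()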
- def is_next_line_obviously_invalid_request_line(self) -> bool: - try: - # HTTP header line must not contain non-printable characters - # and should not start with a space - return self._data[0] < 0x21 - except IndexError: - return False diff --git a/myenv/Lib/site-packages/h11/_state.py b/myenv/Lib/site-packages/h11/_state.py deleted file mode 100644 index 3593430..0000000 --- a/myenv/Lib/site-packages/h11/_state.py +++ /dev/null @@ -1,367 +0,0 @@ -################################################################ -# The core state machine -################################################################ -# -# Rule 1: everything that affects the state machine and state transitions must -# live here in this file. As much as possible goes into the table-based -# representation, but for the bits that don't quite fit, the actual code and -# state must nonetheless live here. -# -# Rule 2: this file does not know about what role we're playing; it only knows -# about HTTP request/response cycles in the abstract. This ensures that we -# don't cheat and apply different rules to local and remote parties. -# -# -# Theory of operation -# =================== -# -# Possibly the simplest way to think about this is that we actually have 5 -# different state machines here. Yes, 5. These are: -# -# 1) The client state, with its complicated automaton (see the docs) -# 2) The server state, with its complicated automaton (see the docs) -# 3) The keep-alive state, with possible states {True, False} -# 4) The SWITCH_CONNECT state, with possible states {False, True} -# 5) The SWITCH_UPGRADE state, with possible states {False, True} -# -# For (3)-(5), the first state listed is the initial state. -# -# (1)-(3) are stored explicitly in member variables. The last -# two are stored implicitly in the pending_switch_proposals set as: -# (state of 4) == (_SWITCH_CONNECT in pending_switch_proposals) -# (state of 5) == (_SWITCH_UPGRADE in pending_switch_proposals) -# -# And each of these machines has two different kinds of transitions: -# -# a) Event-triggered -# b) State-triggered -# -# Event triggered is the obvious thing that you'd think it is: some event -# happens, and if it's the right event at the right time then a transition -# happens. But there are somewhat complicated rules for which machines can -# "see" which events. (As a rule of thumb, if a machine "sees" an event, this -# means two things: the event can affect the machine, and if the machine is -# not in a state where it expects that event then it's an error.) These rules -# are: -# -# 1) The client machine sees all h11.events objects emitted by the client. -# -# 2) The server machine sees all h11.events objects emitted by the server. -# -# It also sees the client's Request event. -# -# And sometimes, server events are annotated with a _SWITCH_* event. For -# example, we can have a (Response, _SWITCH_CONNECT) event, which is -# different from a regular Response event. -# -# 3) The keep-alive machine sees the process_keep_alive_disabled() event -# (which is derived from Request/Response events), and this event -# transitions it from True -> False, or from False -> False. There's no way -# to transition back. -# -# 4&5) The _SWITCH_* machines transition from False->True when we get a -# Request that proposes the relevant type of switch (via -# process_client_switch_proposals), and they go from True->False when we -# get a Response that has no _SWITCH_* annotation. -# -# So that's event-triggered transitions. 
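-#
-# For example, two entries read straight out of the
-# EVENT_TRIGGERED_TRANSITIONS table defined below:
-#
-#     EVENT_TRIGGERED_TRANSITIONS[CLIENT][IDLE][Request] is SEND_BODY
-#     EVENT_TRIGGERED_TRANSITIONS[SERVER][SEND_RESPONSE][Response] is SEND_BODY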
-# -# State-triggered transitions are less standard. What they do here is couple -# the machines together. The way this works is, when certain *joint* -# configurations of states are achieved, then we automatically transition to a -# new *joint* state. So, for example, if we're ever in a joint state with -# -# client: DONE -# keep-alive: False -# -# then the client state immediately transitions to: -# -# client: MUST_CLOSE -# -# This is fundamentally different from an event-based transition, because it -# doesn't matter how we arrived at the {client: DONE, keep-alive: False} state -# -- maybe the client transitioned SEND_BODY -> DONE, or keep-alive -# transitioned True -> False. Either way, once this precondition is satisfied, -# this transition is immediately triggered. -# -# What if two conflicting state-based transitions get enabled at the same -# time? In practice there's only one case where this arises (client DONE -> -# MIGHT_SWITCH_PROTOCOL versus DONE -> MUST_CLOSE), and we resolve it by -# explicitly prioritizing the DONE -> MIGHT_SWITCH_PROTOCOL transition. -# -# Implementation -# -------------- -# -# The event-triggered transitions for the server and client machines are all -# stored explicitly in a table. Ditto for the state-triggered transitions that -# involve just the server and client state. -# -# The transitions for the other machines, and the state-triggered transitions -# that involve the other machines, are written out as explicit Python code. -# -# It'd be nice if there were some cleaner way to do all this. This isn't -# *too* terrible, but I feel like it could probably be better. -# -# WARNING -# ------- -# -# The script that generates the state machine diagrams for the docs knows how -# to read out the EVENT_TRIGGERED_TRANSITIONS and STATE_TRIGGERED_TRANSITIONS -# tables. But it can't automatically read the transitions that are written -# directly in Python code. So if you touch those, you need to also update the -# script to keep it in sync! -from typing import cast, Dict, Optional, Set, Tuple, Type, Union - -from ._events import * -from ._util import LocalProtocolError, Sentinel - -# Everything in __all__ gets re-exported as part of the h11 public API. 
-__all__ = [ - "CLIENT", - "SERVER", - "IDLE", - "SEND_RESPONSE", - "SEND_BODY", - "DONE", - "MUST_CLOSE", - "CLOSED", - "MIGHT_SWITCH_PROTOCOL", - "SWITCHED_PROTOCOL", - "ERROR", -] - - -class CLIENT(Sentinel, metaclass=Sentinel): - pass - - -class SERVER(Sentinel, metaclass=Sentinel): - pass - - -# States -class IDLE(Sentinel, metaclass=Sentinel): - pass - - -class SEND_RESPONSE(Sentinel, metaclass=Sentinel): - pass - - -class SEND_BODY(Sentinel, metaclass=Sentinel): - pass - - -class DONE(Sentinel, metaclass=Sentinel): - pass - - -class MUST_CLOSE(Sentinel, metaclass=Sentinel): - pass - - -class CLOSED(Sentinel, metaclass=Sentinel): - pass - - -class ERROR(Sentinel, metaclass=Sentinel): - pass - - -# Switch types -class MIGHT_SWITCH_PROTOCOL(Sentinel, metaclass=Sentinel): - pass - - -class SWITCHED_PROTOCOL(Sentinel, metaclass=Sentinel): - pass - - -class _SWITCH_UPGRADE(Sentinel, metaclass=Sentinel): - pass - - -class _SWITCH_CONNECT(Sentinel, metaclass=Sentinel): - pass - - -EventTransitionType = Dict[ - Type[Sentinel], - Dict[ - Type[Sentinel], - Dict[Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], Type[Sentinel]], - ], -] - -EVENT_TRIGGERED_TRANSITIONS: EventTransitionType = { - CLIENT: { - IDLE: {Request: SEND_BODY, ConnectionClosed: CLOSED}, - SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, - DONE: {ConnectionClosed: CLOSED}, - MUST_CLOSE: {ConnectionClosed: CLOSED}, - CLOSED: {ConnectionClosed: CLOSED}, - MIGHT_SWITCH_PROTOCOL: {}, - SWITCHED_PROTOCOL: {}, - ERROR: {}, - }, - SERVER: { - IDLE: { - ConnectionClosed: CLOSED, - Response: SEND_BODY, - # Special case: server sees client Request events, in this form - (Request, CLIENT): SEND_RESPONSE, - }, - SEND_RESPONSE: { - InformationalResponse: SEND_RESPONSE, - Response: SEND_BODY, - (InformationalResponse, _SWITCH_UPGRADE): SWITCHED_PROTOCOL, - (Response, _SWITCH_CONNECT): SWITCHED_PROTOCOL, - }, - SEND_BODY: {Data: SEND_BODY, EndOfMessage: DONE}, - DONE: {ConnectionClosed: CLOSED}, - MUST_CLOSE: {ConnectionClosed: CLOSED}, - CLOSED: {ConnectionClosed: CLOSED}, - SWITCHED_PROTOCOL: {}, - ERROR: {}, - }, -} - -StateTransitionType = Dict[ - Tuple[Type[Sentinel], Type[Sentinel]], Dict[Type[Sentinel], Type[Sentinel]] -] - -# NB: there are also some special-case state-triggered transitions hard-coded -# into _fire_state_triggered_transitions below. -STATE_TRIGGERED_TRANSITIONS: StateTransitionType = { - # (Client state, Server state) -> new states - # Protocol negotiation - (MIGHT_SWITCH_PROTOCOL, SWITCHED_PROTOCOL): {CLIENT: SWITCHED_PROTOCOL}, - # Socket shutdown - (CLOSED, DONE): {SERVER: MUST_CLOSE}, - (CLOSED, IDLE): {SERVER: MUST_CLOSE}, - (ERROR, DONE): {SERVER: MUST_CLOSE}, - (DONE, CLOSED): {CLIENT: MUST_CLOSE}, - (IDLE, CLOSED): {CLIENT: MUST_CLOSE}, - (DONE, ERROR): {CLIENT: MUST_CLOSE}, -} - - -class ConnectionState: - def __init__(self) -> None: - # Extra bits of state that don't quite fit into the state model. - - # If this is False then it enables the automatic DONE -> MUST_CLOSE - # transition. Don't set this directly; call .keep_alive_disabled() - self.keep_alive = True - - # This is a subset of {UPGRADE, CONNECT}, containing the proposals - # made by the client for switching protocols. 
- self.pending_switch_proposals: Set[Type[Sentinel]] = set() - - self.states: Dict[Type[Sentinel], Type[Sentinel]] = {CLIENT: IDLE, SERVER: IDLE} - - def process_error(self, role: Type[Sentinel]) -> None: - self.states[role] = ERROR - self._fire_state_triggered_transitions() - - def process_keep_alive_disabled(self) -> None: - self.keep_alive = False - self._fire_state_triggered_transitions() - - def process_client_switch_proposal(self, switch_event: Type[Sentinel]) -> None: - self.pending_switch_proposals.add(switch_event) - self._fire_state_triggered_transitions() - - def process_event( - self, - role: Type[Sentinel], - event_type: Type[Event], - server_switch_event: Optional[Type[Sentinel]] = None, - ) -> None: - _event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]] = event_type - if server_switch_event is not None: - assert role is SERVER - if server_switch_event not in self.pending_switch_proposals: - raise LocalProtocolError( - "Received server {} event without a pending proposal".format( - server_switch_event - ) - ) - _event_type = (event_type, server_switch_event) - if server_switch_event is None and _event_type is Response: - self.pending_switch_proposals = set() - self._fire_event_triggered_transitions(role, _event_type) - # Special case: the server state does get to see Request - # events. - if _event_type is Request: - assert role is CLIENT - self._fire_event_triggered_transitions(SERVER, (Request, CLIENT)) - self._fire_state_triggered_transitions() - - def _fire_event_triggered_transitions( - self, - role: Type[Sentinel], - event_type: Union[Type[Event], Tuple[Type[Event], Type[Sentinel]]], - ) -> None: - state = self.states[role] - try: - new_state = EVENT_TRIGGERED_TRANSITIONS[role][state][event_type] - except KeyError: - event_type = cast(Type[Event], event_type) - raise LocalProtocolError( - "can't handle event type {} when role={} and state={}".format( - event_type.__name__, role, self.states[role] - ) - ) from None - self.states[role] = new_state - - def _fire_state_triggered_transitions(self) -> None: - # We apply these rules repeatedly until converging on a fixed point - while True: - start_states = dict(self.states) - - # It could happen that both these special-case transitions are - # enabled at the same time: - # - # DONE -> MIGHT_SWITCH_PROTOCOL - # DONE -> MUST_CLOSE - # - # For example, this will always be true of a HTTP/1.0 client - # requesting CONNECT. If this happens, the protocol switch takes - # priority. From there the client will either go to - # SWITCHED_PROTOCOL, in which case it's none of our business when - # they close the connection, or else the server will deny the - # request, in which case the client will go back to DONE and then - # from there to MUST_CLOSE. - if self.pending_switch_proposals: - if self.states[CLIENT] is DONE: - self.states[CLIENT] = MIGHT_SWITCH_PROTOCOL - - if not self.pending_switch_proposals: - if self.states[CLIENT] is MIGHT_SWITCH_PROTOCOL: - self.states[CLIENT] = DONE - - if not self.keep_alive: - for role in (CLIENT, SERVER): - if self.states[role] is DONE: - self.states[role] = MUST_CLOSE - - # Tabular state-triggered transitions - joint_state = (self.states[CLIENT], self.states[SERVER]) - changes = STATE_TRIGGERED_TRANSITIONS.get(joint_state, {}) - self.states.update(changes) - - if self.states == start_states: - # Fixed point reached - return - - def start_next_cycle(self) -> None: - if self.states != {CLIENT: DONE, SERVER: DONE}: - raise LocalProtocolError( - "not in a reusable state. 
self.states={}".format(self.states) - ) - # Can't reach DONE/DONE with any of these active, but still, let's be - # sure. - assert self.keep_alive - assert not self.pending_switch_proposals - self.states = {CLIENT: IDLE, SERVER: IDLE} diff --git a/myenv/Lib/site-packages/h11/_util.py b/myenv/Lib/site-packages/h11/_util.py deleted file mode 100644 index 6718445..0000000 --- a/myenv/Lib/site-packages/h11/_util.py +++ /dev/null @@ -1,135 +0,0 @@ -from typing import Any, Dict, NoReturn, Pattern, Tuple, Type, TypeVar, Union - -__all__ = [ - "ProtocolError", - "LocalProtocolError", - "RemoteProtocolError", - "validate", - "bytesify", -] - - -class ProtocolError(Exception): - """Exception indicating a violation of the HTTP/1.1 protocol. - - This as an abstract base class, with two concrete base classes: - :exc:`LocalProtocolError`, which indicates that you tried to do something - that HTTP/1.1 says is illegal, and :exc:`RemoteProtocolError`, which - indicates that the remote peer tried to do something that HTTP/1.1 says is - illegal. See :ref:`error-handling` for details. - - In addition to the normal :exc:`Exception` features, it has one attribute: - - .. attribute:: error_status_hint - - This gives a suggestion as to what status code a server might use if - this error occurred as part of a request. - - For a :exc:`RemoteProtocolError`, this is useful as a suggestion for - how you might want to respond to a misbehaving peer, if you're - implementing a server. - - For a :exc:`LocalProtocolError`, this can be taken as a suggestion for - how your peer might have responded to *you* if h11 had allowed you to - continue. - - The default is 400 Bad Request, a generic catch-all for protocol - violations. - - """ - - def __init__(self, msg: str, error_status_hint: int = 400) -> None: - if type(self) is ProtocolError: - raise TypeError("tried to directly instantiate ProtocolError") - Exception.__init__(self, msg) - self.error_status_hint = error_status_hint - - -# Strategy: there are a number of public APIs where a LocalProtocolError can -# be raised (send(), all the different event constructors, ...), and only one -# public API where RemoteProtocolError can be raised -# (receive_data()). Therefore we always raise LocalProtocolError internally, -# and then receive_data will translate this into a RemoteProtocolError. -# -# Internally: -# LocalProtocolError is the generic "ProtocolError". -# Externally: -# LocalProtocolError is for local errors and RemoteProtocolError is for -# remote errors. -class LocalProtocolError(ProtocolError): - def _reraise_as_remote_protocol_error(self) -> NoReturn: - # After catching a LocalProtocolError, use this method to re-raise it - # as a RemoteProtocolError. This method must be called from inside an - # except: block. - # - # An easy way to get an equivalent RemoteProtocolError is just to - # modify 'self' in place. - self.__class__ = RemoteProtocolError # type: ignore - # But the re-raising is somewhat non-trivial -- you might think that - # now that we've modified the in-flight exception object, that just - # doing 'raise' to re-raise it would be enough. But it turns out that - # this doesn't work, because Python tracks the exception type - # (exc_info[0]) separately from the exception object (exc_info[1]), - # and we only modified the latter. So we really do need to re-raise - # the new type explicitly. 
- # On py3, the traceback is part of the exception object, so our
- # in-place modification preserved it and we can just re-raise:
- raise self
-
-
-class RemoteProtocolError(ProtocolError):
- pass
-
-
-def validate(
- regex: Pattern[bytes], data: bytes, msg: str = "malformed data", *format_args: Any
-) -> Dict[str, bytes]:
- match = regex.fullmatch(data)
- if not match:
- if format_args:
- msg = msg.format(*format_args)
- raise LocalProtocolError(msg)
- return match.groupdict()
-
-
-# Sentinel values
-#
-# - Inherit identity-based comparison and hashing from object
-# - Have a nice repr
-# - Have a *bonus property*: type(sentinel) is sentinel
-#
-# The bonus property is useful if you want to take the return value from
-# next_event() and do some sort of dispatch based on type(event).
-
-_T_Sentinel = TypeVar("_T_Sentinel", bound="Sentinel")
-
-
-class Sentinel(type):
- def __new__(
- cls: Type[_T_Sentinel],
- name: str,
- bases: Tuple[type, ...],
- namespace: Dict[str, Any],
- **kwds: Any
- ) -> _T_Sentinel:
- assert bases == (Sentinel,)
- v = super().__new__(cls, name, bases, namespace, **kwds)
- v.__class__ = v # type: ignore
- return v
-
- def __repr__(self) -> str:
- return self.__name__
-
-
-# Used for methods, request targets, HTTP versions, header names, and header
-# values. Accepts ascii-strings, or bytes/bytearray/memoryview/..., and always
-# returns bytes.
-def bytesify(s: Union[bytes, bytearray, memoryview, int, str]) -> bytes:
- # Fast-path:
- if type(s) is bytes:
- return s
- if isinstance(s, str):
- s = s.encode("ascii")
- if isinstance(s, int):
- raise TypeError("expected bytes-like object, not int")
- return bytes(s)
diff --git a/myenv/Lib/site-packages/h11/_version.py b/myenv/Lib/site-packages/h11/_version.py
deleted file mode 100644
index 4c89113..0000000
--- a/myenv/Lib/site-packages/h11/_version.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# This file must be kept very simple, because it is consumed from several
-# places -- it is imported by h11/__init__.py, execfile'd by setup.py, etc.
-
-# We use a simple scheme:
-# 1.0.0 -> 1.0.0+dev -> 1.1.0 -> 1.1.0+dev
-# where the +dev versions are never released into the wild, they're just what
-# we stick into the VCS in between releases.
-#
-# This is compatible with PEP 440:
-# http://legacy.python.org/dev/peps/pep-0440/
-# via the use of the "local suffix" "+dev", which is disallowed on index
-# servers and causes 1.0.0+dev to sort after plain 1.0.0, which is what we
-# want. (Contrast with the special suffix 1.0.0.dev, which sorts *before*
-# 1.0.0.)
-
-__version__ = "0.14.0"
diff --git a/myenv/Lib/site-packages/h11/_writers.py b/myenv/Lib/site-packages/h11/_writers.py
deleted file mode 100644
index 939cdb9..0000000
--- a/myenv/Lib/site-packages/h11/_writers.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Code to write HTTP data
-#
-# Strategy: each writer takes an event + a write-some-bytes function, which it
-# calls.
-#
-# WRITERS is a dict describing how to pick a writer.
It maps states to either:
-# - a writer
-# - or, for body writers, a dict of framing-dependent writer factories
-
-from typing import Any, Callable, Dict, List, Tuple, Type, Union
-
-from ._events import Data, EndOfMessage, Event, InformationalResponse, Request, Response
-from ._headers import Headers
-from ._state import CLIENT, IDLE, SEND_BODY, SEND_RESPONSE, SERVER
-from ._util import LocalProtocolError, Sentinel
-
-__all__ = ["WRITERS"]
-
-Writer = Callable[[bytes], Any]
-
-
-def write_headers(headers: Headers, write: Writer) -> None:
- # "Since the Host field-value is critical information for handling a
- # request, a user agent SHOULD generate Host as the first header field
- # following the request-line." - RFC 7230
- raw_items = headers._full_items
- for raw_name, name, value in raw_items:
- if name == b"host":
- write(b"%s: %s\r\n" % (raw_name, value))
- for raw_name, name, value in raw_items:
- if name != b"host":
- write(b"%s: %s\r\n" % (raw_name, value))
- write(b"\r\n")
-
-
-def write_request(request: Request, write: Writer) -> None:
- if request.http_version != b"1.1":
- raise LocalProtocolError("I only send HTTP/1.1")
- write(b"%s %s HTTP/1.1\r\n" % (request.method, request.target))
- write_headers(request.headers, write)
-
-
-# Shared between InformationalResponse and Response
-def write_any_response(
- response: Union[InformationalResponse, Response], write: Writer
-) -> None:
- if response.http_version != b"1.1":
- raise LocalProtocolError("I only send HTTP/1.1")
- status_bytes = str(response.status_code).encode("ascii")
- # We don't bother sending ascii status messages like "OK"; they're
- # optional and ignored by the protocol. (But the space after the numeric
- # status code is mandatory.)
- #
- # XX FIXME: could at least make an effort to pull out the status message
- # from stdlib's http.HTTPStatus table. Or maybe just steal their enums
- # (either by import or copy/paste). We already accept them as status codes
- # since they're of type IntEnum < int.
- write(b"HTTP/1.1 %s %s\r\n" % (status_bytes, response.reason))
- write_headers(response.headers, write)
-
-
-class BodyWriter:
- def __call__(self, event: Event, write: Writer) -> None:
- if type(event) is Data:
- self.send_data(event.data, write)
- elif type(event) is EndOfMessage:
- self.send_eom(event.headers, write)
- else: # pragma: no cover
- assert False
-
- def send_data(self, data: bytes, write: Writer) -> None:
- pass
-
- def send_eom(self, headers: Headers, write: Writer) -> None:
- pass
-
-
-#
-# These are all careful not to do anything to 'data' except call len(data) and
-# write(data). This allows us to transparently pass-through funny objects,
-# like placeholder objects referring to files on disk that will be sent via
-# sendfile(2).
-#
-class ContentLengthWriter(BodyWriter):
- def __init__(self, length: int) -> None:
- self._length = length
-
- def send_data(self, data: bytes, write: Writer) -> None:
- self._length -= len(data)
- if self._length < 0:
- raise LocalProtocolError("Too much data for declared Content-Length")
- write(data)
-
- def send_eom(self, headers: Headers, write: Writer) -> None:
- if self._length != 0:
- raise LocalProtocolError("Too little data for declared Content-Length")
- if headers:
- raise LocalProtocolError("Content-Length and trailers don't mix")
-
-
-class ChunkedWriter(BodyWriter):
- def send_data(self, data: bytes, write: Writer) -> None:
- # if we encoded 0-length data in the naive way, it would look like an
- # end-of-message.
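- # A sketch of the resulting wire format (illustrative bytes):
- # send_data(b"hello", write) emits b"5\r\n", b"hello", b"\r\n", and
- # send_eom with no trailers emits b"0\r\n" followed by b"\r\n".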
- if not data: - return - write(b"%x\r\n" % len(data)) - write(data) - write(b"\r\n") - - def send_eom(self, headers: Headers, write: Writer) -> None: - write(b"0\r\n") - write_headers(headers, write) - - -class Http10Writer(BodyWriter): - def send_data(self, data: bytes, write: Writer) -> None: - write(data) - - def send_eom(self, headers: Headers, write: Writer) -> None: - if headers: - raise LocalProtocolError("can't send trailers to HTTP/1.0 client") - # no need to close the socket ourselves, that will be taken care of by - # Connection: close machinery - - -WritersType = Dict[ - Union[Tuple[Type[Sentinel], Type[Sentinel]], Type[Sentinel]], - Union[ - Dict[str, Type[BodyWriter]], - Callable[[Union[InformationalResponse, Response], Writer], None], - Callable[[Request, Writer], None], - ], -] - -WRITERS: WritersType = { - (CLIENT, IDLE): write_request, - (SERVER, IDLE): write_any_response, - (SERVER, SEND_RESPONSE): write_any_response, - SEND_BODY: { - "chunked": ChunkedWriter, - "content-length": ContentLengthWriter, - "http/1.0": Http10Writer, - }, -} diff --git a/myenv/Lib/site-packages/h11/py.typed b/myenv/Lib/site-packages/h11/py.typed deleted file mode 100644 index f5642f7..0000000 --- a/myenv/Lib/site-packages/h11/py.typed +++ /dev/null @@ -1 +0,0 @@ -Marker diff --git a/myenv/Lib/site-packages/h11/tests/__init__.py b/myenv/Lib/site-packages/h11/tests/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/h11/tests/data/test-file b/myenv/Lib/site-packages/h11/tests/data/test-file deleted file mode 100644 index d0be0a6..0000000 --- a/myenv/Lib/site-packages/h11/tests/data/test-file +++ /dev/null @@ -1 +0,0 @@ -92b12bc045050b55b848d37167a1a63947c364579889ce1d39788e45e9fac9e5 diff --git a/myenv/Lib/site-packages/h11/tests/helpers.py b/myenv/Lib/site-packages/h11/tests/helpers.py deleted file mode 100644 index 571be44..0000000 --- a/myenv/Lib/site-packages/h11/tests/helpers.py +++ /dev/null @@ -1,101 +0,0 @@ -from typing import cast, List, Type, Union, ValuesView - -from .._connection import Connection, NEED_DATA, PAUSED -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._state import CLIENT, CLOSED, DONE, MUST_CLOSE, SERVER -from .._util import Sentinel - -try: - from typing import Literal -except ImportError: - from typing_extensions import Literal # type: ignore - - -def get_all_events(conn: Connection) -> List[Event]: - got_events = [] - while True: - event = conn.next_event() - if event in (NEED_DATA, PAUSED): - break - event = cast(Event, event) - got_events.append(event) - if type(event) is ConnectionClosed: - break - return got_events - - -def receive_and_get(conn: Connection, data: bytes) -> List[Event]: - conn.receive_data(data) - return get_all_events(conn) - - -# Merges adjacent Data events, converts payloads to bytestrings, and removes -# chunk boundaries. 
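-# For example (a sketch):
-#
-#     normalize_data_events(
-#         [Data(data=bytearray(b"ab"), chunk_end=True), Data(data=b"cd", chunk_start=True)]
-#     ) == [Data(data=b"abcd")]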
-def normalize_data_events(in_events: List[Event]) -> List[Event]:
- out_events: List[Event] = []
- for event in in_events:
- if type(event) is Data:
- event = Data(data=bytes(event.data), chunk_start=False, chunk_end=False)
- if out_events and type(out_events[-1]) is type(event) is Data:
- out_events[-1] = Data(
- data=out_events[-1].data + event.data,
- chunk_start=out_events[-1].chunk_start,
- chunk_end=out_events[-1].chunk_end,
- )
- else:
- out_events.append(event)
- return out_events
-
-
-# Given that we want to write tests that push some events through a Connection
-# and check that its state updates appropriately... we might as well make a habit
-# of pushing them through two Connections with a fake network link in
-# between.
-class ConnectionPair:
- def __init__(self) -> None:
- self.conn = {CLIENT: Connection(CLIENT), SERVER: Connection(SERVER)}
- self.other = {CLIENT: SERVER, SERVER: CLIENT}
-
- @property
- def conns(self) -> ValuesView[Connection]:
- return self.conn.values()
-
- # expect="match" if expect=send_events; expect=[...] to say what is expected
- def send(
- self,
- role: Type[Sentinel],
- send_events: Union[List[Event], Event],
- expect: Union[List[Event], Event, Literal["match"]] = "match",
- ) -> bytes:
- if not isinstance(send_events, list):
- send_events = [send_events]
- data = b""
- closed = False
- for send_event in send_events:
- new_data = self.conn[role].send(send_event)
- if new_data is None:
- closed = True
- else:
- data += new_data
- # send uses b"" to mean b"", and None to mean closed
- # receive uses b"" to mean closed, and None to mean "try again"
- # so we have to translate between the two conventions
- if data:
- self.conn[self.other[role]].receive_data(data)
- if closed:
- self.conn[self.other[role]].receive_data(b"")
- got_events = get_all_events(self.conn[self.other[role]])
- if expect == "match":
- expect = send_events
- if not isinstance(expect, list):
- expect = [expect]
- assert got_events == expect
- return data
diff --git a/myenv/Lib/site-packages/h11/tests/test_against_stdlib_http.py b/myenv/Lib/site-packages/h11/tests/test_against_stdlib_http.py
deleted file mode 100644
index d2ee131..0000000
--- a/myenv/Lib/site-packages/h11/tests/test_against_stdlib_http.py
+++ /dev/null
@@ -1,115 +0,0 @@
-import json
-import os.path
-import socket
-import socketserver
-import threading
-from contextlib import closing, contextmanager
-from http.server import SimpleHTTPRequestHandler
-from typing import Callable, Generator
-from urllib.request import urlopen
-
-import h11
-
-
-@contextmanager
-def socket_server(
- handler: Callable[..., socketserver.BaseRequestHandler]
-) -> Generator[socketserver.TCPServer, None, None]:
- httpd = socketserver.TCPServer(("127.0.0.1", 0), handler)
- thread = threading.Thread(
- target=httpd.serve_forever, kwargs={"poll_interval": 0.01}
- )
- thread.daemon = True
- try:
- thread.start()
- yield httpd
- finally:
- httpd.shutdown()
-
-
-test_file_path = os.path.join(os.path.dirname(__file__), "data/test-file")
-with open(test_file_path, "rb") as f:
- test_file_data = f.read()
-
-
-class SingleMindedRequestHandler(SimpleHTTPRequestHandler):
- def translate_path(self, path: str) -> str:
- return test_file_path
-
-
-def test_h11_as_client() -> None:
- with socket_server(SingleMindedRequestHandler) as httpd:
- with closing(socket.create_connection(httpd.server_address)) as s:
- c = h11.Connection(h11.CLIENT)
-
- s.sendall(
- c.send( # type: ignore[arg-type]
- h11.Request(
- method="GET", target="/foo", headers=[("Host", "localhost")]
- ) - ) - ) - s.sendall(c.send(h11.EndOfMessage())) # type: ignore[arg-type] - - data = bytearray() - while True: - event = c.next_event() - print(event) - if event is h11.NEED_DATA: - # Use a small read buffer to make things more challenging - # and exercise more paths :-) - c.receive_data(s.recv(10)) - continue - if type(event) is h11.Response: - assert event.status_code == 200 - if type(event) is h11.Data: - data += event.data - if type(event) is h11.EndOfMessage: - break - assert bytes(data) == test_file_data - - -class H11RequestHandler(socketserver.BaseRequestHandler): - def handle(self) -> None: - with closing(self.request) as s: - c = h11.Connection(h11.SERVER) - request = None - while True: - event = c.next_event() - if event is h11.NEED_DATA: - # Use a small read buffer to make things more challenging - # and exercise more paths :-) - c.receive_data(s.recv(10)) - continue - if type(event) is h11.Request: - request = event - if type(event) is h11.EndOfMessage: - break - assert request is not None - info = json.dumps( - { - "method": request.method.decode("ascii"), - "target": request.target.decode("ascii"), - "headers": { - name.decode("ascii"): value.decode("ascii") - for (name, value) in request.headers - }, - } - ) - s.sendall(c.send(h11.Response(status_code=200, headers=[]))) # type: ignore[arg-type] - s.sendall(c.send(h11.Data(data=info.encode("ascii")))) - s.sendall(c.send(h11.EndOfMessage())) - - -def test_h11_as_server() -> None: - with socket_server(H11RequestHandler) as httpd: - host, port = httpd.server_address - url = "http://{}:{}/some-path".format(host, port) - with closing(urlopen(url)) as f: - assert f.getcode() == 200 - data = f.read() - info = json.loads(data.decode("ascii")) - print(info) - assert info["method"] == "GET" - assert info["target"] == "/some-path" - assert "urllib" in info["headers"]["user-agent"] diff --git a/myenv/Lib/site-packages/h11/tests/test_connection.py b/myenv/Lib/site-packages/h11/tests/test_connection.py deleted file mode 100644 index 73a27b9..0000000 --- a/myenv/Lib/site-packages/h11/tests/test_connection.py +++ /dev/null @@ -1,1122 +0,0 @@ -from typing import Any, cast, Dict, List, Optional, Tuple, Type - -import pytest - -from .._connection import _body_framing, _keep_alive, Connection, NEED_DATA, PAUSED -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._state import ( - CLIENT, - CLOSED, - DONE, - ERROR, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from .._util import LocalProtocolError, RemoteProtocolError, Sentinel -from .helpers import ConnectionPair, get_all_events, receive_and_get - - -def test__keep_alive() -> None: - assert _keep_alive( - Request(method="GET", target="/", headers=[("Host", "Example.com")]) - ) - assert not _keep_alive( - Request( - method="GET", - target="/", - headers=[("Host", "Example.com"), ("Connection", "close")], - ) - ) - assert not _keep_alive( - Request( - method="GET", - target="/", - headers=[("Host", "Example.com"), ("Connection", "a, b, cLOse, foo")], - ) - ) - assert not _keep_alive( - Request(method="GET", target="/", headers=[], http_version="1.0") # type: ignore[arg-type] - ) - - assert _keep_alive(Response(status_code=200, headers=[])) # type: ignore[arg-type] - assert not _keep_alive(Response(status_code=200, headers=[("Connection", "close")])) - assert not _keep_alive( - Response(status_code=200, headers=[("Connection", "a, b, cLOse, 
foo")]) - ) - assert not _keep_alive(Response(status_code=200, headers=[], http_version="1.0")) # type: ignore[arg-type] - - -def test__body_framing() -> None: - def headers(cl: Optional[int], te: bool) -> List[Tuple[str, str]]: - headers = [] - if cl is not None: - headers.append(("Content-Length", str(cl))) - if te: - headers.append(("Transfer-Encoding", "chunked")) - return headers - - def resp( - status_code: int = 200, cl: Optional[int] = None, te: bool = False - ) -> Response: - return Response(status_code=status_code, headers=headers(cl, te)) - - def req(cl: Optional[int] = None, te: bool = False) -> Request: - h = headers(cl, te) - h += [("Host", "example.com")] - return Request(method="GET", target="/", headers=h) - - # Special cases where the headers are ignored: - for kwargs in [{}, {"cl": 100}, {"te": True}, {"cl": 100, "te": True}]: - kwargs = cast(Dict[str, Any], kwargs) - for meth, r in [ - (b"HEAD", resp(**kwargs)), - (b"GET", resp(status_code=204, **kwargs)), - (b"GET", resp(status_code=304, **kwargs)), - ]: - assert _body_framing(meth, r) == ("content-length", (0,)) - - # Transfer-encoding - for kwargs in [{"te": True}, {"cl": 100, "te": True}]: - kwargs = cast(Dict[str, Any], kwargs) - for meth, r in [(None, req(**kwargs)), (b"GET", resp(**kwargs))]: # type: ignore - assert _body_framing(meth, r) == ("chunked", ()) - - # Content-Length - for meth, r in [(None, req(cl=100)), (b"GET", resp(cl=100))]: # type: ignore - assert _body_framing(meth, r) == ("content-length", (100,)) - - # No headers - assert _body_framing(None, req()) == ("content-length", (0,)) # type: ignore - assert _body_framing(b"GET", resp()) == ("http/1.0", ()) - - -def test_Connection_basics_and_content_length() -> None: - with pytest.raises(ValueError): - Connection("CLIENT") # type: ignore - - p = ConnectionPair() - assert p.conn[CLIENT].our_role is CLIENT - assert p.conn[CLIENT].their_role is SERVER - assert p.conn[SERVER].our_role is SERVER - assert p.conn[SERVER].their_role is CLIENT - - data = p.send( - CLIENT, - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Content-Length", "10")], - ), - ) - assert data == ( - b"GET / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 10\r\n\r\n" - ) - - for conn in p.conns: - assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - assert p.conn[CLIENT].our_state is SEND_BODY - assert p.conn[CLIENT].their_state is SEND_RESPONSE - assert p.conn[SERVER].our_state is SEND_RESPONSE - assert p.conn[SERVER].their_state is SEND_BODY - - assert p.conn[CLIENT].their_http_version is None - assert p.conn[SERVER].their_http_version == b"1.1" - - data = p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] - assert data == b"HTTP/1.1 100 \r\n\r\n" - - data = p.send(SERVER, Response(status_code=200, headers=[("Content-Length", "11")])) - assert data == b"HTTP/1.1 200 \r\nContent-Length: 11\r\n\r\n" - - for conn in p.conns: - assert conn.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} - - assert p.conn[CLIENT].their_http_version == b"1.1" - assert p.conn[SERVER].their_http_version == b"1.1" - - data = p.send(CLIENT, Data(data=b"12345")) - assert data == b"12345" - data = p.send( - CLIENT, Data(data=b"67890"), expect=[Data(data=b"67890"), EndOfMessage()] - ) - assert data == b"67890" - data = p.send(CLIENT, EndOfMessage(), expect=[]) - assert data == b"" - - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} - - data = p.send(SERVER, Data(data=b"1234567890")) - 
assert data == b"1234567890" - data = p.send(SERVER, Data(data=b"1"), expect=[Data(data=b"1"), EndOfMessage()]) - assert data == b"1" - data = p.send(SERVER, EndOfMessage(), expect=[]) - assert data == b"" - - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: DONE} - - -def test_chunked() -> None: - p = ConnectionPair() - - p.send( - CLIENT, - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], - ), - ) - data = p.send(CLIENT, Data(data=b"1234567890", chunk_start=True, chunk_end=True)) - assert data == b"a\r\n1234567890\r\n" - data = p.send(CLIENT, Data(data=b"abcde", chunk_start=True, chunk_end=True)) - assert data == b"5\r\nabcde\r\n" - data = p.send(CLIENT, Data(data=b""), expect=[]) - assert data == b"" - data = p.send(CLIENT, EndOfMessage(headers=[("hello", "there")])) - assert data == b"0\r\nhello: there\r\n\r\n" - - p.send( - SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) - ) - p.send(SERVER, Data(data=b"54321", chunk_start=True, chunk_end=True)) - p.send(SERVER, Data(data=b"12345", chunk_start=True, chunk_end=True)) - p.send(SERVER, EndOfMessage()) - - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: DONE} - - -def test_chunk_boundaries() -> None: - conn = Connection(our_role=SERVER) - - request = ( - b"POST / HTTP/1.1\r\n" - b"Host: example.com\r\n" - b"Transfer-Encoding: chunked\r\n" - b"\r\n" - ) - conn.receive_data(request) - assert conn.next_event() == Request( - method="POST", - target="/", - headers=[("Host", "example.com"), ("Transfer-Encoding", "chunked")], - ) - assert conn.next_event() is NEED_DATA - - conn.receive_data(b"5\r\nhello\r\n") - assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) - - conn.receive_data(b"5\r\nhel") - assert conn.next_event() == Data(data=b"hel", chunk_start=True, chunk_end=False) - - conn.receive_data(b"l") - assert conn.next_event() == Data(data=b"l", chunk_start=False, chunk_end=False) - - conn.receive_data(b"o\r\n") - assert conn.next_event() == Data(data=b"o", chunk_start=False, chunk_end=True) - - conn.receive_data(b"5\r\nhello") - assert conn.next_event() == Data(data=b"hello", chunk_start=True, chunk_end=True) - - conn.receive_data(b"\r\n") - assert conn.next_event() == NEED_DATA - - conn.receive_data(b"0\r\n\r\n") - assert conn.next_event() == EndOfMessage() - - -def test_client_talking_to_http10_server() -> None: - c = Connection(CLIENT) - c.send(Request(method="GET", target="/", headers=[("Host", "example.com")])) - c.send(EndOfMessage()) - assert c.our_state is DONE - # No content-length, so Http10 framing for body - assert receive_and_get(c, b"HTTP/1.0 200 OK\r\n\r\n") == [ - Response(status_code=200, headers=[], http_version="1.0", reason=b"OK") # type: ignore[arg-type] - ] - assert c.our_state is MUST_CLOSE - assert receive_and_get(c, b"12345") == [Data(data=b"12345")] - assert receive_and_get(c, b"67890") == [Data(data=b"67890")] - assert receive_and_get(c, b"") == [EndOfMessage(), ConnectionClosed()] - assert c.their_state is CLOSED - - -def test_server_talking_to_http10_client() -> None: - c = Connection(SERVER) - # No content-length, so no body - # NB: no host header - assert receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") == [ - Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] - EndOfMessage(), - ] - assert c.their_state is MUST_CLOSE - - # We automatically Connection: close back at them - assert ( - 
c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" - ) - - assert c.send(Data(data=b"12345")) == b"12345" - assert c.send(EndOfMessage()) == b"" - assert c.our_state is MUST_CLOSE - - # Check that it works if they do send Content-Length - c = Connection(SERVER) - # NB: no host header - assert receive_and_get(c, b"POST / HTTP/1.0\r\nContent-Length: 10\r\n\r\n1") == [ - Request( - method="POST", - target="/", - headers=[("Content-Length", "10")], - http_version="1.0", - ), - Data(data=b"1"), - ] - assert receive_and_get(c, b"234567890") == [Data(data=b"234567890"), EndOfMessage()] - assert c.their_state is MUST_CLOSE - assert receive_and_get(c, b"") == [ConnectionClosed()] - - -def test_automatic_transfer_encoding_in_response() -> None: - # Check that in responses, the user can specify either Transfer-Encoding: - # chunked or no framing at all, and in both cases we automatically select - # the right option depending on whether the peer speaks HTTP/1.0 or - # HTTP/1.1 - for user_headers in [ - [("Transfer-Encoding", "chunked")], - [], - # In fact, this even works if Content-Length is set, - # because if both are set then Transfer-Encoding wins - [("Transfer-Encoding", "chunked"), ("Content-Length", "100")], - ]: - user_headers = cast(List[Tuple[str, str]], user_headers) - p = ConnectionPair() - p.send( - CLIENT, - [ - Request(method="GET", target="/", headers=[("Host", "example.com")]), - EndOfMessage(), - ], - ) - # When speaking to HTTP/1.1 client, all of the above cases get - # normalized to Transfer-Encoding: chunked - p.send( - SERVER, - Response(status_code=200, headers=user_headers), - expect=Response( - status_code=200, headers=[("Transfer-Encoding", "chunked")] - ), - ) - - # When speaking to HTTP/1.0 client, all of the above cases get - # normalized to no-framing-headers - c = Connection(SERVER) - receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") - assert ( - c.send(Response(status_code=200, headers=user_headers)) - == b"HTTP/1.1 200 \r\nConnection: close\r\n\r\n" - ) - assert c.send(Data(data=b"12345")) == b"12345" - - -def test_automagic_connection_close_handling() -> None: - p = ConnectionPair() - # If the user explicitly sets Connection: close, then we notice and - # respect it - p.send( - CLIENT, - [ - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Connection", "close")], - ), - EndOfMessage(), - ], - ) - for conn in p.conns: - assert conn.states[CLIENT] is MUST_CLOSE - # And if the client sets it, the server automatically echoes it back - p.send( - SERVER, - # no header here... 
- [Response(status_code=204, headers=[]), EndOfMessage()], # type: ignore[arg-type] - # ...but oh look, it arrived anyway - expect=[ - Response(status_code=204, headers=[("connection", "close")]), - EndOfMessage(), - ], - ) - for conn in p.conns: - assert conn.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} - - -def test_100_continue() -> None: - def setup() -> ConnectionPair: - p = ConnectionPair() - p.send( - CLIENT, - Request( - method="GET", - target="/", - headers=[ - ("Host", "example.com"), - ("Content-Length", "100"), - ("Expect", "100-continue"), - ], - ), - ) - for conn in p.conns: - assert conn.client_is_waiting_for_100_continue - assert not p.conn[CLIENT].they_are_waiting_for_100_continue - assert p.conn[SERVER].they_are_waiting_for_100_continue - return p - - # Disabled by 100 Continue - p = setup() - p.send(SERVER, InformationalResponse(status_code=100, headers=[])) # type: ignore[arg-type] - for conn in p.conns: - assert not conn.client_is_waiting_for_100_continue - assert not conn.they_are_waiting_for_100_continue - - # Disabled by a real response - p = setup() - p.send( - SERVER, Response(status_code=200, headers=[("Transfer-Encoding", "chunked")]) - ) - for conn in p.conns: - assert not conn.client_is_waiting_for_100_continue - assert not conn.they_are_waiting_for_100_continue - - # Disabled by the client going ahead and sending stuff anyway - p = setup() - p.send(CLIENT, Data(data=b"12345")) - for conn in p.conns: - assert not conn.client_is_waiting_for_100_continue - assert not conn.they_are_waiting_for_100_continue - - -def test_max_incomplete_event_size_countermeasure() -> None: - # Infinitely long headers are definitely not okay - c = Connection(SERVER) - c.receive_data(b"GET / HTTP/1.0\r\nEndless: ") - assert c.next_event() is NEED_DATA - with pytest.raises(RemoteProtocolError): - while True: - c.receive_data(b"a" * 1024) - c.next_event() - - # Checking that the same header is accepted / rejected depending on the - # max_incomplete_event_size setting: - c = Connection(SERVER, max_incomplete_event_size=5000) - c.receive_data(b"GET / HTTP/1.0\r\nBig: ") - c.receive_data(b"a" * 4000) - c.receive_data(b"\r\n\r\n") - assert get_all_events(c) == [ - Request( - method="GET", target="/", http_version="1.0", headers=[("big", "a" * 4000)] - ), - EndOfMessage(), - ] - - c = Connection(SERVER, max_incomplete_event_size=4000) - c.receive_data(b"GET / HTTP/1.0\r\nBig: ") - c.receive_data(b"a" * 4000) - with pytest.raises(RemoteProtocolError): - c.next_event() - - # Temporarily exceeding the size limit is fine, as long as it's done with - # complete events: - c = Connection(SERVER, max_incomplete_event_size=5000) - c.receive_data(b"GET / HTTP/1.0\r\nContent-Length: 10000") - c.receive_data(b"\r\n\r\n" + b"a" * 10000) - assert get_all_events(c) == [ - Request( - method="GET", - target="/", - http_version="1.0", - headers=[("Content-Length", "10000")], - ), - Data(data=b"a" * 10000), - EndOfMessage(), - ] - - c = Connection(SERVER, max_incomplete_event_size=100) - # Two pipelined requests to create a way-too-big receive buffer...
but - # it's fine because we're not checking - c.receive_data( - b"GET /1 HTTP/1.1\r\nHost: a\r\n\r\n" - b"GET /2 HTTP/1.1\r\nHost: b\r\n\r\n" + b"X" * 1000 - ) - assert get_all_events(c) == [ - Request(method="GET", target="/1", headers=[("host", "a")]), - EndOfMessage(), - ] - # Even more data comes in, still no problem - c.receive_data(b"X" * 1000) - # We can respond and reuse to get the second pipelined request - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - c.start_next_cycle() - assert get_all_events(c) == [ - Request(method="GET", target="/2", headers=[("host", "b")]), - EndOfMessage(), - ] - # But once we unpause and try to read the next message, and find that it's - # incomplete and the buffer is *still* way too large, then *that's* a - # problem: - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - c.start_next_cycle() - with pytest.raises(RemoteProtocolError): - c.next_event() - - -def test_reuse_simple() -> None: - p = ConnectionPair() - p.send( - CLIENT, - [Request(method="GET", target="/", headers=[("Host", "a")]), EndOfMessage()], - ) - p.send( - SERVER, - [ - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - EndOfMessage(), - ], - ) - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: DONE} - conn.start_next_cycle() - - p.send( - CLIENT, - [ - Request(method="DELETE", target="/foo", headers=[("Host", "a")]), - EndOfMessage(), - ], - ) - p.send( - SERVER, - [ - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - EndOfMessage(), - ], - ) - - -def test_pipelining() -> None: - # Client doesn't support pipelining, so we have to do this by hand - c = Connection(SERVER) - assert c.next_event() is NEED_DATA - # 3 requests all bunched up - c.receive_data( - b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"12345" - b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"67890" - b"GET /3 HTTP/1.1\r\nHost: a.com\r\n\r\n" - ) - assert get_all_events(c) == [ - Request( - method="GET", - target="/1", - headers=[("Host", "a.com"), ("Content-Length", "5")], - ), - Data(data=b"12345"), - EndOfMessage(), - ] - assert c.their_state is DONE - assert c.our_state is SEND_RESPONSE - - assert c.next_event() is PAUSED - - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - assert c.their_state is DONE - assert c.our_state is DONE - - c.start_next_cycle() - - assert get_all_events(c) == [ - Request( - method="GET", - target="/2", - headers=[("Host", "a.com"), ("Content-Length", "5")], - ), - Data(data=b"67890"), - EndOfMessage(), - ] - assert c.next_event() is PAUSED - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - c.start_next_cycle() - - assert get_all_events(c) == [ - Request(method="GET", target="/3", headers=[("Host", "a.com")]), - EndOfMessage(), - ] - # Doesn't pause this time, no trailing data - assert c.next_event() is NEED_DATA - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - - # Arrival of more data triggers pause - assert c.next_event() is NEED_DATA - c.receive_data(b"SADF") - assert c.next_event() is PAUSED - assert c.trailing_data == (b"SADF", False) - # If EOF arrives while paused, we don't see that either: - c.receive_data(b"") - assert c.trailing_data == (b"SADF", True) - assert c.next_event() is PAUSED - c.receive_data(b"") - assert c.next_event() 
is PAUSED - # Can't call receive_data with non-empty buf after closing it - with pytest.raises(RuntimeError): - c.receive_data(b"FDSA") - - -def test_protocol_switch() -> None: - for (req, deny, accept) in [ - ( - Request( - method="CONNECT", - target="example.com:443", - headers=[("Host", "foo"), ("Content-Length", "1")], - ), - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - ), - ( - Request( - method="GET", - target="/", - headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], - ), - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - InformationalResponse(status_code=101, headers=[("Upgrade", "a")]), - ), - ( - Request( - method="CONNECT", - target="example.com:443", - headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], - ), - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - # Accept CONNECT, not upgrade - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - ), - ( - Request( - method="CONNECT", - target="example.com:443", - headers=[("Host", "foo"), ("Content-Length", "1"), ("Upgrade", "a, b")], - ), - Response(status_code=404, headers=[(b"transfer-encoding", b"chunked")]), - # Accept Upgrade, not CONNECT - InformationalResponse(status_code=101, headers=[("Upgrade", "b")]), - ), - ]: - - def setup() -> ConnectionPair: - p = ConnectionPair() - p.send(CLIENT, req) - # No switch-related state change stuff yet; the client has to - # finish the request before that kicks in - for conn in p.conns: - assert conn.states[CLIENT] is SEND_BODY - p.send(CLIENT, [Data(data=b"1"), EndOfMessage()]) - for conn in p.conns: - assert conn.states[CLIENT] is MIGHT_SWITCH_PROTOCOL - assert p.conn[SERVER].next_event() is PAUSED - return p - - # Test deny case - p = setup() - p.send(SERVER, deny) - for conn in p.conns: - assert conn.states == {CLIENT: DONE, SERVER: SEND_BODY} - p.send(SERVER, EndOfMessage()) - # Check that re-use is still allowed after a denial - for conn in p.conns: - conn.start_next_cycle() - - # Test accept case - p = setup() - p.send(SERVER, accept) - for conn in p.conns: - assert conn.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} - conn.receive_data(b"123") - assert conn.next_event() is PAUSED - conn.receive_data(b"456") - assert conn.next_event() is PAUSED - assert conn.trailing_data == (b"123456", False) - - # Pausing in might-switch, then recovery - # (weird artificial case where the trailing data actually is valid - # HTTP for some reason, because this makes it easier to test the state - # logic) - p = setup() - sc = p.conn[SERVER] - sc.receive_data(b"GET / HTTP/1.0\r\n\r\n") - assert sc.next_event() is PAUSED - assert sc.trailing_data == (b"GET / HTTP/1.0\r\n\r\n", False) - sc.send(deny) - assert sc.next_event() is PAUSED - sc.send(EndOfMessage()) - sc.start_next_cycle() - assert get_all_events(sc) == [ - Request(method="GET", target="/", headers=[], http_version="1.0"), # type: ignore[arg-type] - EndOfMessage(), - ] - - # When we're DONE, have no trailing data, and the connection gets - # closed, we report ConnectionClosed(). When we're in might-switch or - # switched, we don't. 
- p = setup() - sc = p.conn[SERVER] - sc.receive_data(b"") - assert sc.next_event() is PAUSED - assert sc.trailing_data == (b"", True) - p.send(SERVER, accept) - assert sc.next_event() is PAUSED - - p = setup() - sc = p.conn[SERVER] - sc.receive_data(b"") - assert sc.next_event() is PAUSED - sc.send(deny) - assert sc.next_event() == ConnectionClosed() - - # You can't send after switching protocols, or while waiting for a - # protocol switch - p = setup() - with pytest.raises(LocalProtocolError): - p.conn[CLIENT].send( - Request(method="GET", target="/", headers=[("Host", "a")]) - ) - p = setup() - p.send(SERVER, accept) - with pytest.raises(LocalProtocolError): - p.conn[SERVER].send(Data(data=b"123")) - - -def test_close_simple() -> None: - # Just immediately closing a new connection without anything having - # happened yet. - for (who_shot_first, who_shot_second) in [(CLIENT, SERVER), (SERVER, CLIENT)]: - - def setup() -> ConnectionPair: - p = ConnectionPair() - p.send(who_shot_first, ConnectionClosed()) - for conn in p.conns: - assert conn.states == { - who_shot_first: CLOSED, - who_shot_second: MUST_CLOSE, - } - return p - - # You can keep putting b"" into a closed connection, and you keep - # getting ConnectionClosed() out: - p = setup() - assert p.conn[who_shot_second].next_event() == ConnectionClosed() - assert p.conn[who_shot_second].next_event() == ConnectionClosed() - p.conn[who_shot_second].receive_data(b"") - assert p.conn[who_shot_second].next_event() == ConnectionClosed() - # Second party can close... - p = setup() - p.send(who_shot_second, ConnectionClosed()) - for conn in p.conns: - assert conn.our_state is CLOSED - assert conn.their_state is CLOSED - # But trying to receive new data on a closed connection is a - # RuntimeError (not ProtocolError, because the problem here isn't - # violation of HTTP, it's violation of physics) - p = setup() - with pytest.raises(RuntimeError): - p.conn[who_shot_second].receive_data(b"123") - # And receiving new data on a MUST_CLOSE connection is a ProtocolError - p = setup() - p.conn[who_shot_first].receive_data(b"GET") - with pytest.raises(RemoteProtocolError): - p.conn[who_shot_first].next_event() - - -def test_close_different_states() -> None: - req = [ - Request(method="GET", target="/foo", headers=[("Host", "a")]), - EndOfMessage(), - ] - resp = [ - Response(status_code=200, headers=[(b"transfer-encoding", b"chunked")]), - EndOfMessage(), - ] - - # Client before request - p = ConnectionPair() - p.send(CLIENT, ConnectionClosed()) - for conn in p.conns: - assert conn.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} - - # Client after request - p = ConnectionPair() - p.send(CLIENT, req) - p.send(CLIENT, ConnectionClosed()) - for conn in p.conns: - assert conn.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} - - # Server after request -> not allowed - p = ConnectionPair() - p.send(CLIENT, req) - with pytest.raises(LocalProtocolError): - p.conn[SERVER].send(ConnectionClosed()) - p.conn[CLIENT].receive_data(b"") - with pytest.raises(RemoteProtocolError): - p.conn[CLIENT].next_event() - - # Server after response - p = ConnectionPair() - p.send(CLIENT, req) - p.send(SERVER, resp) - p.send(SERVER, ConnectionClosed()) - for conn in p.conns: - assert conn.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} - - # Both after closing (ConnectionClosed() is idempotent) - p = ConnectionPair() - p.send(CLIENT, req) - p.send(SERVER, resp) - p.send(CLIENT, ConnectionClosed()) - p.send(SERVER, ConnectionClosed()) - p.send(CLIENT, ConnectionClosed()) - 
p.send(SERVER, ConnectionClosed()) - - # In the middle of sending -> not allowed - p = ConnectionPair() - p.send( - CLIENT, - Request( - method="GET", target="/", headers=[("Host", "a"), ("Content-Length", "10")] - ), - ) - with pytest.raises(LocalProtocolError): - p.conn[CLIENT].send(ConnectionClosed()) - p.conn[SERVER].receive_data(b"") - with pytest.raises(RemoteProtocolError): - p.conn[SERVER].next_event() - - -# Receive several requests and then client shuts down their side of the -# connection; we can respond to each -def test_pipelined_close() -> None: - c = Connection(SERVER) - # 2 requests then a close - c.receive_data( - b"GET /1 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"12345" - b"GET /2 HTTP/1.1\r\nHost: a.com\r\nContent-Length: 5\r\n\r\n" - b"67890" - ) - c.receive_data(b"") - assert get_all_events(c) == [ - Request( - method="GET", - target="/1", - headers=[("host", "a.com"), ("content-length", "5")], - ), - Data(data=b"12345"), - EndOfMessage(), - ] - assert c.states[CLIENT] is DONE - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - assert c.states[SERVER] is DONE - c.start_next_cycle() - assert get_all_events(c) == [ - Request( - method="GET", - target="/2", - headers=[("host", "a.com"), ("content-length", "5")], - ), - Data(data=b"67890"), - EndOfMessage(), - ConnectionClosed(), - ] - assert c.states == {CLIENT: CLOSED, SERVER: SEND_RESPONSE} - c.send(Response(status_code=200, headers=[])) # type: ignore[arg-type] - c.send(EndOfMessage()) - assert c.states == {CLIENT: CLOSED, SERVER: MUST_CLOSE} - c.send(ConnectionClosed()) - assert c.states == {CLIENT: CLOSED, SERVER: CLOSED} - - -def test_sendfile() -> None: - class SendfilePlaceholder: - def __len__(self) -> int: - return 10 - - placeholder = SendfilePlaceholder() - - def setup( - header: Tuple[str, str], http_version: str - ) -> Tuple[Connection, Optional[List[bytes]]]: - c = Connection(SERVER) - receive_and_get( - c, "GET / HTTP/{}\r\nHost: a\r\n\r\n".format(http_version).encode("ascii") - ) - headers = [] - if header: - headers.append(header) - c.send(Response(status_code=200, headers=headers)) - return c, c.send_with_data_passthrough(Data(data=placeholder)) # type: ignore - - c, data = setup(("Content-Length", "10"), "1.1") - assert data == [placeholder] # type: ignore - # Raises an error if the connection object doesn't think we've sent - # exactly 10 bytes - c.send(EndOfMessage()) - - _, data = setup(("Transfer-Encoding", "chunked"), "1.1") - assert placeholder in data # type: ignore - data[data.index(placeholder)] = b"x" * 10 # type: ignore - assert b"".join(data) == b"a\r\nxxxxxxxxxx\r\n" # type: ignore - - c, data = setup(None, "1.0") # type: ignore - assert data == [placeholder] # type: ignore - assert c.our_state is SEND_BODY - - -def test_errors() -> None: - # After a receive error, you can't receive - for role in [CLIENT, SERVER]: - c = Connection(our_role=role) - c.receive_data(b"gibberish\r\n\r\n") - with pytest.raises(RemoteProtocolError): - c.next_event() - # Now any attempt to receive continues to raise - assert c.their_state is ERROR - assert c.our_state is not ERROR - print(c._cstate.states) - with pytest.raises(RemoteProtocolError): - c.next_event() - # But we can still yell at the client for sending us gibberish - if role is SERVER: - assert ( - c.send(Response(status_code=400, headers=[])) # type: ignore[arg-type] - == b"HTTP/1.1 400 \r\nConnection: close\r\n\r\n" - ) - - # After an error sending, you can no longer send - # (This is 
especially important for things like content-length errors, - # where there's complex internal state being modified) - def conn(role: Type[Sentinel]) -> Connection: - c = Connection(our_role=role) - if role is SERVER: - # Put it into the state where it *could* send a response... - receive_and_get(c, b"GET / HTTP/1.0\r\n\r\n") - assert c.our_state is SEND_RESPONSE - return c - - for role in [CLIENT, SERVER]: - if role is CLIENT: - # This HTTP/1.0 request won't be detected as bad until after we go - # through the state machine and hit the writing code - good = Request(method="GET", target="/", headers=[("Host", "example.com")]) - bad = Request( - method="GET", - target="/", - headers=[("Host", "example.com")], - http_version="1.0", - ) - elif role is SERVER: - good = Response(status_code=200, headers=[]) # type: ignore[arg-type,assignment] - bad = Response(status_code=200, headers=[], http_version="1.0") # type: ignore[arg-type,assignment] - # Make sure 'good' actually is good - c = conn(role) - c.send(good) - assert c.our_state is not ERROR - # Do that again, but this time sending 'bad' first - c = conn(role) - with pytest.raises(LocalProtocolError): - c.send(bad) - assert c.our_state is ERROR - assert c.their_state is not ERROR - # Now 'good' is not so good - with pytest.raises(LocalProtocolError): - c.send(good) - - # And check send_failed() too - c = conn(role) - c.send_failed() - assert c.our_state is ERROR - assert c.their_state is not ERROR - # This is idempotent - c.send_failed() - assert c.our_state is ERROR - assert c.their_state is not ERROR - - -def test_idle_receive_nothing() -> None: - # At one point this incorrectly raised an error - for role in [CLIENT, SERVER]: - c = Connection(role) - assert c.next_event() is NEED_DATA - - -def test_connection_drop() -> None: - c = Connection(SERVER) - c.receive_data(b"GET /") - assert c.next_event() is NEED_DATA - c.receive_data(b"") - with pytest.raises(RemoteProtocolError): - c.next_event() - - -def test_408_request_timeout() -> None: - # Should be able to send this spontaneously as a server without seeing - # anything from client - p = ConnectionPair() - p.send(SERVER, Response(status_code=408, headers=[(b"connection", b"close")])) - - -# This used to raise IndexError -def test_empty_request() -> None: - c = Connection(SERVER) - c.receive_data(b"\r\n") - with pytest.raises(RemoteProtocolError): - c.next_event() - - -# This used to raise IndexError -def test_empty_response() -> None: - c = Connection(CLIENT) - c.send(Request(method="GET", target="/", headers=[("Host", "a")])) - c.receive_data(b"\r\n") - with pytest.raises(RemoteProtocolError): - c.next_event() - - -@pytest.mark.parametrize( - "data", - [ - b"\x00", - b"\x20", - b"\x16\x03\x01\x00\xa5", # Typical start of a TLS Client Hello - ], -) -def test_early_detection_of_invalid_request(data: bytes) -> None: - c = Connection(SERVER) - # Early detection should occur before even receiving a `\r\n` - c.receive_data(data) - with pytest.raises(RemoteProtocolError): - c.next_event() - - -@pytest.mark.parametrize( - "data", - [ - b"\x00", - b"\x20", - b"\x16\x03\x03\x00\x31", # Typical start of a TLS Server Hello - ], -) -def test_early_detection_of_invalid_response(data: bytes) -> None: - c = Connection(CLIENT) - # Early detection should occur before even receiving a `\r\n` - c.receive_data(data) - with pytest.raises(RemoteProtocolError): - c.next_event() - - -# This used to give different headers for HEAD and GET. 
-# The correct way to handle HEAD is to put whatever headers we *would* have -# put if it were a GET -- even though we know that for HEAD, those headers -# will be ignored. -def test_HEAD_framing_headers() -> None: - def setup(method: bytes, http_version: bytes) -> Connection: - c = Connection(SERVER) - c.receive_data( - method + b" / HTTP/" + http_version + b"\r\n" + b"Host: example.com\r\n\r\n" - ) - assert type(c.next_event()) is Request - assert type(c.next_event()) is EndOfMessage - return c - - for method in [b"GET", b"HEAD"]: - # No Content-Length, HTTP/1.1 peer, should use chunked - c = setup(method, b"1.1") - assert ( - c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] - b"Transfer-Encoding: chunked\r\n\r\n" - ) - - # No Content-Length, HTTP/1.0 peer, frame with connection: close - c = setup(method, b"1.0") - assert ( - c.send(Response(status_code=200, headers=[])) == b"HTTP/1.1 200 \r\n" # type: ignore[arg-type] - b"Connection: close\r\n\r\n" - ) - - # Content-Length + Transfer-Encoding, TE wins - c = setup(method, b"1.1") - assert ( - c.send( - Response( - status_code=200, - headers=[ - ("Content-Length", "100"), - ("Transfer-Encoding", "chunked"), - ], - ) - ) - == b"HTTP/1.1 200 \r\n" - b"Transfer-Encoding: chunked\r\n\r\n" - ) - - -def test_special_exceptions_for_lost_connection_in_message_body() -> None: - c = Connection(SERVER) - c.receive_data( - b"POST / HTTP/1.1\r\n" b"Host: example.com\r\n" b"Content-Length: 100\r\n\r\n" - ) - assert type(c.next_event()) is Request - assert c.next_event() is NEED_DATA - c.receive_data(b"12345") - assert c.next_event() == Data(data=b"12345") - c.receive_data(b"") - with pytest.raises(RemoteProtocolError) as excinfo: - c.next_event() - assert "received 5 bytes" in str(excinfo.value) - assert "expected 100" in str(excinfo.value) - - c = Connection(SERVER) - c.receive_data( - b"POST / HTTP/1.1\r\n" - b"Host: example.com\r\n" - b"Transfer-Encoding: chunked\r\n\r\n" - ) - assert type(c.next_event()) is Request - assert c.next_event() is NEED_DATA - c.receive_data(b"8\r\n012345") - assert c.next_event().data == b"012345" # type: ignore - c.receive_data(b"") - with pytest.raises(RemoteProtocolError) as excinfo: - c.next_event() - assert "incomplete chunked read" in str(excinfo.value) diff --git a/myenv/Lib/site-packages/h11/tests/test_events.py b/myenv/Lib/site-packages/h11/tests/test_events.py deleted file mode 100644 index bc6c313..0000000 --- a/myenv/Lib/site-packages/h11/tests/test_events.py +++ /dev/null @@ -1,150 +0,0 @@ -from http import HTTPStatus - -import pytest - -from .. 
import _events -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._util import LocalProtocolError - - -def test_events() -> None: - with pytest.raises(LocalProtocolError): - # Missing Host: - req = Request( - method="GET", target="/", headers=[("a", "b")], http_version="1.1" - ) - # But this is okay (HTTP/1.0) - req = Request(method="GET", target="/", headers=[("a", "b")], http_version="1.0") - # fields are normalized - assert req.method == b"GET" - assert req.target == b"/" - assert req.headers == [(b"a", b"b")] - assert req.http_version == b"1.0" - - # This is also okay -- has a Host (with weird capitalization, which is ok) - req = Request( - method="GET", - target="/", - headers=[("a", "b"), ("hOSt", "example.com")], - http_version="1.1", - ) - # we normalize header capitalization - assert req.headers == [(b"a", b"b"), (b"host", b"example.com")] - - # Multiple host is bad too - with pytest.raises(LocalProtocolError): - req = Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Host", "a")], - http_version="1.1", - ) - # Even for HTTP/1.0 - with pytest.raises(LocalProtocolError): - req = Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Host", "a")], - http_version="1.0", - ) - - # Header values are validated - for bad_char in "\x00\r\n\f\v": - with pytest.raises(LocalProtocolError): - req = Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Foo", "asd" + bad_char)], - http_version="1.0", - ) - - # But for compatibility we allow non-whitespace control characters, even - # though they're forbidden by the spec. - Request( - method="GET", - target="/", - headers=[("Host", "a"), ("Foo", "asd\x01\x02\x7f")], - http_version="1.0", - ) - - # Request target is validated - for bad_byte in b"\x00\x20\x7f\xee": - target = bytearray(b"/") - target.append(bad_byte) - with pytest.raises(LocalProtocolError): - Request( - method="GET", target=target, headers=[("Host", "a")], http_version="1.1" - ) - - # Request method is validated - with pytest.raises(LocalProtocolError): - Request( - method="GET / HTTP/1.1", - target=target, - headers=[("Host", "a")], - http_version="1.1", - ) - - ir = InformationalResponse(status_code=100, headers=[("Host", "a")]) - assert ir.status_code == 100 - assert ir.headers == [(b"host", b"a")] - assert ir.http_version == b"1.1" - - with pytest.raises(LocalProtocolError): - InformationalResponse(status_code=200, headers=[("Host", "a")]) - - resp = Response(status_code=204, headers=[], http_version="1.0") # type: ignore[arg-type] - assert resp.status_code == 204 - assert resp.headers == [] - assert resp.http_version == b"1.0" - - with pytest.raises(LocalProtocolError): - resp = Response(status_code=100, headers=[], http_version="1.0") # type: ignore[arg-type] - - with pytest.raises(LocalProtocolError): - Response(status_code="100", headers=[], http_version="1.0") # type: ignore[arg-type] - - with pytest.raises(LocalProtocolError): - InformationalResponse(status_code=b"100", headers=[], http_version="1.0") # type: ignore[arg-type] - - d = Data(data=b"asdf") - assert d.data == b"asdf" - - eom = EndOfMessage() - assert eom.headers == [] - - cc = ConnectionClosed() - assert repr(cc) == "ConnectionClosed()" - - -def test_intenum_status_code() -> None: - # https://github.com/python-hyper/h11/issues/72 - - r = Response(status_code=HTTPStatus.OK, headers=[], http_version="1.0") # type: ignore[arg-type] - assert r.status_code == HTTPStatus.OK - assert 
type(r.status_code) is not type(HTTPStatus.OK) - assert type(r.status_code) is int - - -def test_header_casing() -> None: - r = Request( - method="GET", - target="/", - headers=[("Host", "example.org"), ("Connection", "keep-alive")], - http_version="1.1", - ) - assert len(r.headers) == 2 - assert r.headers[0] == (b"host", b"example.org") - assert r.headers == [(b"host", b"example.org"), (b"connection", b"keep-alive")] - assert r.headers.raw_items() == [ - (b"Host", b"example.org"), - (b"Connection", b"keep-alive"), - ] diff --git a/myenv/Lib/site-packages/h11/tests/test_headers.py b/myenv/Lib/site-packages/h11/tests/test_headers.py deleted file mode 100644 index ba53d08..0000000 --- a/myenv/Lib/site-packages/h11/tests/test_headers.py +++ /dev/null @@ -1,157 +0,0 @@ -import pytest - -from .._events import Request -from .._headers import ( - get_comma_header, - has_expect_100_continue, - Headers, - normalize_and_validate, - set_comma_header, -) -from .._util import LocalProtocolError - - -def test_normalize_and_validate() -> None: - assert normalize_and_validate([("foo", "bar")]) == [(b"foo", b"bar")] - assert normalize_and_validate([(b"foo", b"bar")]) == [(b"foo", b"bar")] - - # no leading/trailing whitespace in names - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo ", "bar")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b" foo", "bar")]) - - # no weird characters in names - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate([(b"foo bar", b"baz")]) - assert "foo bar" in str(excinfo.value) - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo\x00bar", b"baz")]) - # Not even 8-bit characters: - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo\xffbar", b"baz")]) - # And not even the control characters we allow in values: - with pytest.raises(LocalProtocolError): - normalize_and_validate([(b"foo\x01bar", b"baz")]) - - # no return or NUL characters in values - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate([("foo", "bar\rbaz")]) - assert "bar\\rbaz" in str(excinfo.value) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "bar\nbaz")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "bar\x00baz")]) - # no leading/trailing whitespace - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "barbaz ")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", " barbaz")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "barbaz\t")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("foo", "\tbarbaz")]) - - # content-length - assert normalize_and_validate([("Content-Length", "1")]) == [ - (b"content-length", b"1") - ] - with pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "asdf")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "1x")]) - with pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "1"), ("Content-Length", "2")]) - assert normalize_and_validate( - [("Content-Length", "0"), ("Content-Length", "0")] - ) == [(b"content-length", b"0")] - assert normalize_and_validate([("Content-Length", "0 , 0")]) == [ - (b"content-length", b"0") - ] - with pytest.raises(LocalProtocolError): - normalize_and_validate( - [("Content-Length", "1"), ("Content-Length", "1"), ("Content-Length", "2")] - ) - with 
pytest.raises(LocalProtocolError): - normalize_and_validate([("Content-Length", "1 , 1,2")]) - - # transfer-encoding - assert normalize_and_validate([("Transfer-Encoding", "chunked")]) == [ - (b"transfer-encoding", b"chunked") - ] - assert normalize_and_validate([("Transfer-Encoding", "cHuNkEd")]) == [ - (b"transfer-encoding", b"chunked") - ] - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate([("Transfer-Encoding", "gzip")]) - assert excinfo.value.error_status_hint == 501 # Not Implemented - with pytest.raises(LocalProtocolError) as excinfo: - normalize_and_validate( - [("Transfer-Encoding", "chunked"), ("Transfer-Encoding", "gzip")] - ) - assert excinfo.value.error_status_hint == 501 # Not Implemented - - -def test_get_set_comma_header() -> None: - headers = normalize_and_validate( - [ - ("Connection", "close"), - ("whatever", "something"), - ("connectiON", "fOo,, , BAR"), - ] - ) - - assert get_comma_header(headers, b"connection") == [b"close", b"foo", b"bar"] - - headers = set_comma_header(headers, b"newthing", ["a", "b"]) # type: ignore - - with pytest.raises(LocalProtocolError): - set_comma_header(headers, b"newthing", [" a", "b"]) # type: ignore - - assert headers == [ - (b"connection", b"close"), - (b"whatever", b"something"), - (b"connection", b"fOo,, , BAR"), - (b"newthing", b"a"), - (b"newthing", b"b"), - ] - - headers = set_comma_header(headers, b"whatever", ["different thing"]) # type: ignore - - assert headers == [ - (b"connection", b"close"), - (b"connection", b"fOo,, , BAR"), - (b"newthing", b"a"), - (b"newthing", b"b"), - (b"whatever", b"different thing"), - ] - - -def test_has_100_continue() -> None: - assert has_expect_100_continue( - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Expect", "100-continue")], - ) - ) - assert not has_expect_100_continue( - Request(method="GET", target="/", headers=[("Host", "example.com")]) - ) - # Case insensitive - assert has_expect_100_continue( - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Expect", "100-Continue")], - ) - ) - # Doesn't work in HTTP/1.0 - assert not has_expect_100_continue( - Request( - method="GET", - target="/", - headers=[("Host", "example.com"), ("Expect", "100-continue")], - http_version="1.0", - ) - ) diff --git a/myenv/Lib/site-packages/h11/tests/test_helpers.py b/myenv/Lib/site-packages/h11/tests/test_helpers.py deleted file mode 100644 index c329c76..0000000 --- a/myenv/Lib/site-packages/h11/tests/test_helpers.py +++ /dev/null @@ -1,32 +0,0 @@ -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .helpers import normalize_data_events - - -def test_normalize_data_events() -> None: - assert normalize_data_events( - [ - Data(data=bytearray(b"1")), - Data(data=b"2"), - Response(status_code=200, headers=[]), # type: ignore[arg-type] - Data(data=b"3"), - Data(data=b"4"), - EndOfMessage(), - Data(data=b"5"), - Data(data=b"6"), - Data(data=b"7"), - ] - ) == [ - Data(data=b"12"), - Response(status_code=200, headers=[]), # type: ignore[arg-type] - Data(data=b"34"), - EndOfMessage(), - Data(data=b"567"), - ] diff --git a/myenv/Lib/site-packages/h11/tests/test_io.py b/myenv/Lib/site-packages/h11/tests/test_io.py deleted file mode 100644 index 2b47c0e..0000000 --- a/myenv/Lib/site-packages/h11/tests/test_io.py +++ /dev/null @@ -1,572 +0,0 @@ -from typing import Any, Callable, Generator, List - -import pytest - -from .._events import ( - ConnectionClosed, - 
Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._headers import Headers, normalize_and_validate -from .._readers import ( - _obsolete_line_fold, - ChunkedReader, - ContentLengthReader, - Http10Reader, - READERS, -) -from .._receivebuffer import ReceiveBuffer -from .._state import ( - CLIENT, - CLOSED, - DONE, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from .._util import LocalProtocolError -from .._writers import ( - ChunkedWriter, - ContentLengthWriter, - Http10Writer, - write_any_response, - write_headers, - write_request, - WRITERS, -) -from .helpers import normalize_data_events - -SIMPLE_CASES = [ - ( - (CLIENT, IDLE), - Request( - method="GET", - target="/a", - headers=[("Host", "foo"), ("Connection", "close")], - ), - b"GET /a HTTP/1.1\r\nHost: foo\r\nConnection: close\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - Response(status_code=200, headers=[("Connection", "close")], reason=b"OK"), - b"HTTP/1.1 200 OK\r\nConnection: close\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - Response(status_code=200, headers=[], reason=b"OK"), # type: ignore[arg-type] - b"HTTP/1.1 200 OK\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - InformationalResponse( - status_code=101, headers=[("Upgrade", "websocket")], reason=b"Upgrade" - ), - b"HTTP/1.1 101 Upgrade\r\nUpgrade: websocket\r\n\r\n", - ), - ( - (SERVER, SEND_RESPONSE), - InformationalResponse(status_code=101, headers=[], reason=b"Upgrade"), # type: ignore[arg-type] - b"HTTP/1.1 101 Upgrade\r\n\r\n", - ), -] - - -def dowrite(writer: Callable[..., None], obj: Any) -> bytes: - got_list: List[bytes] = [] - writer(obj, got_list.append) - return b"".join(got_list) - - -def tw(writer: Any, obj: Any, expected: Any) -> None: - got = dowrite(writer, obj) - assert got == expected - - -def makebuf(data: bytes) -> ReceiveBuffer: - buf = ReceiveBuffer() - buf += data - return buf - - -def tr(reader: Any, data: bytes, expected: Any) -> None: - def check(got: Any) -> None: - assert got == expected - # Headers should always be returned as bytes, not e.g. 
bytearray - # https://github.com/python-hyper/wsproto/pull/54#issuecomment-377709478 - for name, value in getattr(got, "headers", []): - assert type(name) is bytes - assert type(value) is bytes - - # Simple: consume whole thing - buf = makebuf(data) - check(reader(buf)) - assert not buf - - # Incrementally growing buffer - buf = ReceiveBuffer() - for i in range(len(data)): - assert reader(buf) is None - buf += data[i : i + 1] - check(reader(buf)) - - # Trailing data - buf = makebuf(data) - buf += b"trailing" - check(reader(buf)) - assert bytes(buf) == b"trailing" - - -def test_writers_simple() -> None: - for ((role, state), event, binary) in SIMPLE_CASES: - tw(WRITERS[role, state], event, binary) - - -def test_readers_simple() -> None: - for ((role, state), event, binary) in SIMPLE_CASES: - tr(READERS[role, state], binary, event) - - -def test_writers_unusual() -> None: - # Simple test of the write_headers utility routine - tw( - write_headers, - normalize_and_validate([("foo", "bar"), ("baz", "quux")]), - b"foo: bar\r\nbaz: quux\r\n\r\n", - ) - tw(write_headers, Headers([]), b"\r\n") - - # We understand HTTP/1.0, but we don't speak it - with pytest.raises(LocalProtocolError): - tw( - write_request, - Request( - method="GET", - target="/", - headers=[("Host", "foo"), ("Connection", "close")], - http_version="1.0", - ), - None, - ) - with pytest.raises(LocalProtocolError): - tw( - write_any_response, - Response( - status_code=200, headers=[("Connection", "close")], http_version="1.0" - ), - None, - ) - - -def test_readers_unusual() -> None: - # Reading HTTP/1.0 - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.0\r\nSome: header\r\n\r\n", - Request( - method="HEAD", - target="/foo", - headers=[("Some", "header")], - http_version="1.0", - ), - ) - - # check no-headers, since it's only legal with HTTP/1.0 - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.0\r\n\r\n", - Request(method="HEAD", target="/foo", headers=[], http_version="1.0"), # type: ignore[arg-type] - ) - - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\nSome: header\r\n\r\n", - Response( - status_code=200, - headers=[("Some", "header")], - http_version="1.0", - reason=b"OK", - ), - ) - - # single-character header values (actually disallowed by the ABNF in RFC - # 7230 -- this is a bug in the standard that we originally copied...) 
- tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\n" b"Foo: a a a a a \r\n\r\n", - Response( - status_code=200, - headers=[("Foo", "a a a a a")], - http_version="1.0", - reason=b"OK", - ), - ) - - # Empty headers -- also legal - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\n" b"Foo:\r\n\r\n", - Response( - status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" - ), - ) - - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200 OK\r\n" b"Foo: \t \t \r\n\r\n", - Response( - status_code=200, headers=[("Foo", "")], http_version="1.0", reason=b"OK" - ), - ) - - # Tolerate broken servers that leave off the response code - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.0 200\r\n" b"Foo: bar\r\n\r\n", - Response( - status_code=200, headers=[("Foo", "bar")], http_version="1.0", reason=b"" - ), - ) - - # Tolerate headers line endings (\r\n and \n) - # \n\r\b between headers and body - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.1 200 OK\r\nSomeHeader: val\n\r\n", - Response( - status_code=200, - headers=[("SomeHeader", "val")], - http_version="1.1", - reason="OK", - ), - ) - - # delimited only with \n - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.1 200 OK\nSomeHeader1: val1\nSomeHeader2: val2\n\n", - Response( - status_code=200, - headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], - http_version="1.1", - reason="OK", - ), - ) - - # mixed \r\n and \n - tr( - READERS[SERVER, SEND_RESPONSE], - b"HTTP/1.1 200 OK\r\nSomeHeader1: val1\nSomeHeader2: val2\n\r\n", - Response( - status_code=200, - headers=[("SomeHeader1", "val1"), ("SomeHeader2", "val2")], - http_version="1.1", - reason="OK", - ), - ) - - # obsolete line folding - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" - b"Host: example.com\r\n" - b"Some: multi-line\r\n" - b" header\r\n" - b"\tnonsense\r\n" - b" \t \t\tI guess\r\n" - b"Connection: close\r\n" - b"More-nonsense: in the\r\n" - b" last header \r\n\r\n", - Request( - method="HEAD", - target="/foo", - headers=[ - ("Host", "example.com"), - ("Some", "multi-line header nonsense I guess"), - ("Connection", "close"), - ("More-nonsense", "in the last header"), - ], - ), - ) - - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b" folded: line\r\n\r\n", - None, - ) - - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b"foo : line\r\n\r\n", - None, - ) - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", - None, - ) - with pytest.raises(LocalProtocolError): - tr( - READERS[CLIENT, IDLE], - b"HEAD /foo HTTP/1.1\r\n" b"foo\t: line\r\n\r\n", - None, - ) - with pytest.raises(LocalProtocolError): - tr(READERS[CLIENT, IDLE], b"HEAD /foo HTTP/1.1\r\n" b": line\r\n\r\n", None) - - -def test__obsolete_line_fold_bytes() -> None: - # _obsolete_line_fold has a defensive cast to bytearray, which is - # necessary to protect against O(n^2) behavior in case anyone ever passes - # in regular bytestrings... but right now we never pass in regular - # bytestrings. so this test just exists to get some coverage on that - # defensive cast. 
- assert list(_obsolete_line_fold([b"aaa", b"bbb", b" ccc", b"ddd"])) == [ - b"aaa", - bytearray(b"bbb ccc"), - b"ddd", - ] - - -def _run_reader_iter( - reader: Any, buf: bytes, do_eof: bool -) -> Generator[Any, None, None]: - while True: - event = reader(buf) - if event is None: - break - yield event - # body readers have undefined behavior after returning EndOfMessage, - # because this changes the state so they don't get called again - if type(event) is EndOfMessage: - break - if do_eof: - assert not buf - yield reader.read_eof() - - -def _run_reader(*args: Any) -> List[Event]: - events = list(_run_reader_iter(*args)) - return normalize_data_events(events) - - -def t_body_reader(thunk: Any, data: bytes, expected: Any, do_eof: bool = False) -> None: - # Simple: consume whole thing - print("Test 1") - buf = makebuf(data) - assert _run_reader(thunk(), buf, do_eof) == expected - - # Incrementally growing buffer - print("Test 2") - reader = thunk() - buf = ReceiveBuffer() - events = [] - for i in range(len(data)): - events += _run_reader(reader, buf, False) - buf += data[i : i + 1] - events += _run_reader(reader, buf, do_eof) - assert normalize_data_events(events) == expected - - is_complete = any(type(event) is EndOfMessage for event in expected) - if is_complete and not do_eof: - buf = makebuf(data + b"trailing") - assert _run_reader(thunk(), buf, False) == expected - - -def test_ContentLengthReader() -> None: - t_body_reader(lambda: ContentLengthReader(0), b"", [EndOfMessage()]) - - t_body_reader( - lambda: ContentLengthReader(10), - b"0123456789", - [Data(data=b"0123456789"), EndOfMessage()], - ) - - -def test_Http10Reader() -> None: - t_body_reader(Http10Reader, b"", [EndOfMessage()], do_eof=True) - t_body_reader(Http10Reader, b"asdf", [Data(data=b"asdf")], do_eof=False) - t_body_reader( - Http10Reader, b"asdf", [Data(data=b"asdf"), EndOfMessage()], do_eof=True - ) - - -def test_ChunkedReader() -> None: - t_body_reader(ChunkedReader, b"0\r\n\r\n", [EndOfMessage()]) - - t_body_reader( - ChunkedReader, - b"0\r\nSome: header\r\n\r\n", - [EndOfMessage(headers=[("Some", "header")])], - ) - - t_body_reader( - ChunkedReader, - b"5\r\n01234\r\n" - + b"10\r\n0123456789abcdef\r\n" - + b"0\r\n" - + b"Some: header\r\n\r\n", - [ - Data(data=b"012340123456789abcdef"), - EndOfMessage(headers=[("Some", "header")]), - ], - ) - - t_body_reader( - ChunkedReader, - b"5\r\n01234\r\n" + b"10\r\n0123456789abcdef\r\n" + b"0\r\n\r\n", - [Data(data=b"012340123456789abcdef"), EndOfMessage()], - ) - - # handles upper and lowercase hex - t_body_reader( - ChunkedReader, - b"aA\r\n" + b"x" * 0xAA + b"\r\n" + b"0\r\n\r\n", - [Data(data=b"x" * 0xAA), EndOfMessage()], - ) - - # refuses arbitrarily long chunk integers - with pytest.raises(LocalProtocolError): - # Technically this is legal HTTP/1.1, but we refuse to process chunk - # sizes that don't fit into 20 characters of hex - t_body_reader(ChunkedReader, b"9" * 100 + b"\r\nxxx", [Data(data=b"xxx")]) - - # refuses garbage in the chunk count - with pytest.raises(LocalProtocolError): - t_body_reader(ChunkedReader, b"10\x00\r\nxxx", None) - - # handles (and discards) "chunk extensions" omg wtf - t_body_reader( - ChunkedReader, - b"5; hello=there\r\n" - + b"xxxxx" - + b"\r\n" - + b'0; random="junk"; some=more; canbe=lonnnnngg\r\n\r\n', - [Data(data=b"xxxxx"), EndOfMessage()], - ) - - t_body_reader( - ChunkedReader, - b"5 \r\n01234\r\n" + b"0\r\n\r\n", - [Data(data=b"01234"), EndOfMessage()], - ) - - -def test_ContentLengthWriter() -> None: - w = ContentLengthWriter(5) - 
assert dowrite(w, Data(data=b"123")) == b"123"
-    assert dowrite(w, Data(data=b"45")) == b"45"
-    assert dowrite(w, EndOfMessage()) == b""
-
-    w = ContentLengthWriter(5)
-    with pytest.raises(LocalProtocolError):
-        dowrite(w, Data(data=b"123456"))
-
-    w = ContentLengthWriter(5)
-    dowrite(w, Data(data=b"123"))
-    with pytest.raises(LocalProtocolError):
-        dowrite(w, Data(data=b"456"))
-
-    w = ContentLengthWriter(5)
-    dowrite(w, Data(data=b"123"))
-    with pytest.raises(LocalProtocolError):
-        dowrite(w, EndOfMessage())
-
-    w = ContentLengthWriter(5)
-    assert dowrite(w, Data(data=b"123")) == b"123"
-    assert dowrite(w, Data(data=b"45")) == b"45"
-    with pytest.raises(LocalProtocolError):
-        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
-
-
-def test_ChunkedWriter() -> None:
-    w = ChunkedWriter()
-    assert dowrite(w, Data(data=b"aaa")) == b"3\r\naaa\r\n"
-    assert dowrite(w, Data(data=b"a" * 20)) == b"14\r\n" + b"a" * 20 + b"\r\n"
-
-    assert dowrite(w, Data(data=b"")) == b""
-
-    assert dowrite(w, EndOfMessage()) == b"0\r\n\r\n"
-
-    assert (
-        dowrite(w, EndOfMessage(headers=[("Etag", "asdf"), ("a", "b")]))
-        == b"0\r\nEtag: asdf\r\na: b\r\n\r\n"
-    )
-
-
-def test_Http10Writer() -> None:
-    w = Http10Writer()
-    assert dowrite(w, Data(data=b"1234")) == b"1234"
-    assert dowrite(w, EndOfMessage()) == b""
-
-    with pytest.raises(LocalProtocolError):
-        dowrite(w, EndOfMessage(headers=[("Etag", "asdf")]))
-
-
-def test_reject_garbage_after_response_line() -> None:
-    with pytest.raises(LocalProtocolError):
-        tr(READERS[SERVER, SEND_RESPONSE], b"HTTP/1.0 200 OK\x00xxxx\r\n\r\n", None)
-
-
-def test_reject_garbage_after_request_line() -> None:
-    with pytest.raises(LocalProtocolError):
-        tr(
-            READERS[CLIENT, IDLE],
-            b"HEAD /foo HTTP/1.1 xxxxxx\r\n" b"Host: a\r\n\r\n",
-            None,
-        )
-
-
-def test_reject_garbage_in_header_line() -> None:
-    with pytest.raises(LocalProtocolError):
-        tr(
-            READERS[CLIENT, IDLE],
-            b"HEAD /foo HTTP/1.1\r\n" b"Host: foo\x00bar\r\n\r\n",
-            None,
-        )
-
-
-def test_reject_non_vchar_in_path() -> None:
-    for bad_char in b"\x00\x20\x7f\xee":
-        message = bytearray(b"HEAD /")
-        message.append(bad_char)
-        message.extend(b" HTTP/1.1\r\nHost: foobar\r\n\r\n")
-        with pytest.raises(LocalProtocolError):
-            tr(READERS[CLIENT, IDLE], message, None)
-
-
-# https://github.com/python-hyper/h11/issues/57
-def test_allow_some_garbage_in_cookies() -> None:
-    tr(
-        READERS[CLIENT, IDLE],
-        b"HEAD /foo HTTP/1.1\r\n"
-        b"Host: foo\r\n"
-        b"Set-Cookie: ___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900\r\n"
-        b"\r\n",
-        Request(
-            method="HEAD",
-            target="/foo",
-            headers=[
-                ("Host", "foo"),
-                ("Set-Cookie", "___utmvafIumyLc=kUd\x01UpAt; path=/; Max-Age=900"),
-            ],
-        ),
-    )
-
-
-def test_host_comes_first() -> None:
-    tw(
-        write_headers,
-        normalize_and_validate([("foo", "bar"), ("Host", "example.com")]),
-        b"Host: example.com\r\nfoo: bar\r\n\r\n",
-    )
diff --git a/myenv/Lib/site-packages/h11/tests/test_receivebuffer.py b/myenv/Lib/site-packages/h11/tests/test_receivebuffer.py
deleted file mode 100644
index 21a3870..0000000
--- a/myenv/Lib/site-packages/h11/tests/test_receivebuffer.py
+++ /dev/null
@@ -1,135 +0,0 @@
-import re
-from typing import Tuple
-
-import pytest
-
-from .._receivebuffer import ReceiveBuffer
-
-
-def test_receivebuffer() -> None:
-    b = ReceiveBuffer()
-    assert not b
-    assert len(b) == 0
-    assert bytes(b) == b""
-
-    b += b"123"
-    assert b
-    assert len(b) == 3
-    assert bytes(b) == b"123"
-
-    assert bytes(b) == b"123"
-
-    assert b.maybe_extract_at_most(2) == b"12"
-    assert b
-    assert len(b) == 1
-    assert
bytes(b) == b"3" - - assert bytes(b) == b"3" - - assert b.maybe_extract_at_most(10) == b"3" - assert bytes(b) == b"" - - assert b.maybe_extract_at_most(10) is None - assert not b - - ################################################################ - # maybe_extract_until_next - ################################################################ - - b += b"123\n456\r\n789\r\n" - - assert b.maybe_extract_next_line() == b"123\n456\r\n" - assert bytes(b) == b"789\r\n" - - assert b.maybe_extract_next_line() == b"789\r\n" - assert bytes(b) == b"" - - b += b"12\r" - assert b.maybe_extract_next_line() is None - assert bytes(b) == b"12\r" - - b += b"345\n\r" - assert b.maybe_extract_next_line() is None - assert bytes(b) == b"12\r345\n\r" - - # here we stopped at the middle of b"\r\n" delimiter - - b += b"\n6789aaa123\r\n" - assert b.maybe_extract_next_line() == b"12\r345\n\r\n" - assert b.maybe_extract_next_line() == b"6789aaa123\r\n" - assert b.maybe_extract_next_line() is None - assert bytes(b) == b"" - - ################################################################ - # maybe_extract_lines - ################################################################ - - b += b"123\r\na: b\r\nfoo:bar\r\n\r\ntrailing" - lines = b.maybe_extract_lines() - assert lines == [b"123", b"a: b", b"foo:bar"] - assert bytes(b) == b"trailing" - - assert b.maybe_extract_lines() is None - - b += b"\r\n\r" - assert b.maybe_extract_lines() is None - - assert b.maybe_extract_at_most(100) == b"trailing\r\n\r" - assert not b - - # Empty body case (as happens at the end of chunked encoding if there are - # no trailing headers, e.g.) - b += b"\r\ntrailing" - assert b.maybe_extract_lines() == [] - assert bytes(b) == b"trailing" - - -@pytest.mark.parametrize( - "data", - [ - pytest.param( - ( - b"HTTP/1.1 200 OK\r\n", - b"Content-type: text/plain\r\n", - b"Connection: close\r\n", - b"\r\n", - b"Some body", - ), - id="with_crlf_delimiter", - ), - pytest.param( - ( - b"HTTP/1.1 200 OK\n", - b"Content-type: text/plain\n", - b"Connection: close\n", - b"\n", - b"Some body", - ), - id="with_lf_only_delimiter", - ), - pytest.param( - ( - b"HTTP/1.1 200 OK\n", - b"Content-type: text/plain\r\n", - b"Connection: close\n", - b"\n", - b"Some body", - ), - id="with_mixed_crlf_and_lf", - ), - ], -) -def test_receivebuffer_for_invalid_delimiter(data: Tuple[bytes]) -> None: - b = ReceiveBuffer() - - for line in data: - b += line - - lines = b.maybe_extract_lines() - - assert lines == [ - b"HTTP/1.1 200 OK", - b"Content-type: text/plain", - b"Connection: close", - ] - assert bytes(b) == b"Some body" diff --git a/myenv/Lib/site-packages/h11/tests/test_state.py b/myenv/Lib/site-packages/h11/tests/test_state.py deleted file mode 100644 index bc974e6..0000000 --- a/myenv/Lib/site-packages/h11/tests/test_state.py +++ /dev/null @@ -1,271 +0,0 @@ -import pytest - -from .._events import ( - ConnectionClosed, - Data, - EndOfMessage, - Event, - InformationalResponse, - Request, - Response, -) -from .._state import ( - _SWITCH_CONNECT, - _SWITCH_UPGRADE, - CLIENT, - CLOSED, - ConnectionState, - DONE, - IDLE, - MIGHT_SWITCH_PROTOCOL, - MUST_CLOSE, - SEND_BODY, - SEND_RESPONSE, - SERVER, - SWITCHED_PROTOCOL, -) -from .._util import LocalProtocolError - - -def test_ConnectionState() -> None: - cs = ConnectionState() - - # Basic event-triggered transitions - - assert cs.states == {CLIENT: IDLE, SERVER: IDLE} - - cs.process_event(CLIENT, Request) - # The SERVER-Request special case: - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - # Illegal 
transitions raise an error and nothing happens - with pytest.raises(LocalProtocolError): - cs.process_event(CLIENT, Request) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, InformationalResponse) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, Response) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_BODY} - - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, EndOfMessage) - assert cs.states == {CLIENT: DONE, SERVER: DONE} - - # State-triggered transition - - cs.process_event(SERVER, ConnectionClosed) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: CLOSED} - - -def test_ConnectionState_keep_alive() -> None: - # keep_alive = False - cs = ConnectionState() - cs.process_event(CLIENT, Request) - cs.process_keep_alive_disabled() - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: MUST_CLOSE} - - -def test_ConnectionState_keep_alive_in_DONE() -> None: - # Check that if keep_alive is disabled when the CLIENT is already in DONE, - # then this is sufficient to immediately trigger the DONE -> MUST_CLOSE - # transition - cs = ConnectionState() - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - assert cs.states[CLIENT] is DONE - cs.process_keep_alive_disabled() - assert cs.states[CLIENT] is MUST_CLOSE - - -def test_ConnectionState_switch_denied() -> None: - for switch_type in (_SWITCH_CONNECT, _SWITCH_UPGRADE): - for deny_early in (True, False): - cs = ConnectionState() - cs.process_client_switch_proposal(switch_type) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, Data) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - assert switch_type in cs.pending_switch_proposals - - if deny_early: - # before client reaches DONE - cs.process_event(SERVER, Response) - assert not cs.pending_switch_proposals - - cs.process_event(CLIENT, EndOfMessage) - - if deny_early: - assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} - else: - assert cs.states == { - CLIENT: MIGHT_SWITCH_PROTOCOL, - SERVER: SEND_RESPONSE, - } - - cs.process_event(SERVER, InformationalResponse) - assert cs.states == { - CLIENT: MIGHT_SWITCH_PROTOCOL, - SERVER: SEND_RESPONSE, - } - - cs.process_event(SERVER, Response) - assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} - assert not cs.pending_switch_proposals - - -_response_type_for_switch = { - _SWITCH_UPGRADE: InformationalResponse, - _SWITCH_CONNECT: Response, - None: Response, -} - - -def test_ConnectionState_protocol_switch_accepted() -> None: - for switch_event in [_SWITCH_UPGRADE, _SWITCH_CONNECT]: - cs = ConnectionState() - cs.process_client_switch_proposal(switch_event) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, Data) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, InformationalResponse) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - - cs.process_event(SERVER, _response_type_for_switch[switch_event], switch_event) - assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} - - -def test_ConnectionState_double_protocol_switch() -> None: - # CONNECT + Upgrade is legal! Very silly, but legal. 
So we support - # it. Because sometimes doing the silly thing is easier than not. - for server_switch in [None, _SWITCH_UPGRADE, _SWITCH_CONNECT]: - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_client_switch_proposal(_SWITCH_CONNECT) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - cs.process_event( - SERVER, _response_type_for_switch[server_switch], server_switch - ) - if server_switch is None: - assert cs.states == {CLIENT: DONE, SERVER: SEND_BODY} - else: - assert cs.states == {CLIENT: SWITCHED_PROTOCOL, SERVER: SWITCHED_PROTOCOL} - - -def test_ConnectionState_inconsistent_protocol_switch() -> None: - for client_switches, server_switch in [ - ([], _SWITCH_CONNECT), - ([], _SWITCH_UPGRADE), - ([_SWITCH_UPGRADE], _SWITCH_CONNECT), - ([_SWITCH_CONNECT], _SWITCH_UPGRADE), - ]: - cs = ConnectionState() - for client_switch in client_switches: # type: ignore[attr-defined] - cs.process_client_switch_proposal(client_switch) - cs.process_event(CLIENT, Request) - with pytest.raises(LocalProtocolError): - cs.process_event(SERVER, Response, server_switch) - - -def test_ConnectionState_keepalive_protocol_switch_interaction() -> None: - # keep_alive=False + pending_switch_proposals - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_event(CLIENT, Request) - cs.process_keep_alive_disabled() - cs.process_event(CLIENT, Data) - assert cs.states == {CLIENT: SEND_BODY, SERVER: SEND_RESPONSE} - - # the protocol switch "wins" - cs.process_event(CLIENT, EndOfMessage) - assert cs.states == {CLIENT: MIGHT_SWITCH_PROTOCOL, SERVER: SEND_RESPONSE} - - # but when the server denies the request, keep_alive comes back into play - cs.process_event(SERVER, Response) - assert cs.states == {CLIENT: MUST_CLOSE, SERVER: SEND_BODY} - - -def test_ConnectionState_reuse() -> None: - cs = ConnectionState() - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - - cs.start_next_cycle() - assert cs.states == {CLIENT: IDLE, SERVER: IDLE} - - # No keepalive - - cs.process_event(CLIENT, Request) - cs.process_keep_alive_disabled() - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - # One side closed - - cs = ConnectionState() - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(CLIENT, ConnectionClosed) - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - # Succesful protocol switch - - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, InformationalResponse, _SWITCH_UPGRADE) - - with pytest.raises(LocalProtocolError): - cs.start_next_cycle() - - # Failed protocol switch - - cs = ConnectionState() - cs.process_client_switch_proposal(_SWITCH_UPGRADE) - cs.process_event(CLIENT, Request) - cs.process_event(CLIENT, EndOfMessage) - cs.process_event(SERVER, Response) - cs.process_event(SERVER, EndOfMessage) - - 
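-    # The denied switch left both client and server in DONE, so unlike the
-    # successful-switch case above, this connection can be recycled: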
cs.start_next_cycle() - assert cs.states == {CLIENT: IDLE, SERVER: IDLE} - - -def test_server_request_is_illegal() -> None: - # There used to be a bug in how we handled the Request special case that - # made this allowed... - cs = ConnectionState() - with pytest.raises(LocalProtocolError): - cs.process_event(SERVER, Request) diff --git a/myenv/Lib/site-packages/h11/tests/test_util.py b/myenv/Lib/site-packages/h11/tests/test_util.py deleted file mode 100644 index 79bc095..0000000 --- a/myenv/Lib/site-packages/h11/tests/test_util.py +++ /dev/null @@ -1,112 +0,0 @@ -import re -import sys -import traceback -from typing import NoReturn - -import pytest - -from .._util import ( - bytesify, - LocalProtocolError, - ProtocolError, - RemoteProtocolError, - Sentinel, - validate, -) - - -def test_ProtocolError() -> None: - with pytest.raises(TypeError): - ProtocolError("abstract base class") - - -def test_LocalProtocolError() -> None: - try: - raise LocalProtocolError("foo") - except LocalProtocolError as e: - assert str(e) == "foo" - assert e.error_status_hint == 400 - - try: - raise LocalProtocolError("foo", error_status_hint=418) - except LocalProtocolError as e: - assert str(e) == "foo" - assert e.error_status_hint == 418 - - def thunk() -> NoReturn: - raise LocalProtocolError("a", error_status_hint=420) - - try: - try: - thunk() - except LocalProtocolError as exc1: - orig_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) - exc1._reraise_as_remote_protocol_error() - except RemoteProtocolError as exc2: - assert type(exc2) is RemoteProtocolError - assert exc2.args == ("a",) - assert exc2.error_status_hint == 420 - new_traceback = "".join(traceback.format_tb(sys.exc_info()[2])) - assert new_traceback.endswith(orig_traceback) - - -def test_validate() -> None: - my_re = re.compile(rb"(?P[0-9]+)\.(?P[0-9]+)") - with pytest.raises(LocalProtocolError): - validate(my_re, b"0.") - - groups = validate(my_re, b"0.1") - assert groups == {"group1": b"0", "group2": b"1"} - - # successful partial matches are an error - must match whole string - with pytest.raises(LocalProtocolError): - validate(my_re, b"0.1xx") - with pytest.raises(LocalProtocolError): - validate(my_re, b"0.1\n") - - -def test_validate_formatting() -> None: - my_re = re.compile(rb"foo") - - with pytest.raises(LocalProtocolError) as excinfo: - validate(my_re, b"", "oops") - assert "oops" in str(excinfo.value) - - with pytest.raises(LocalProtocolError) as excinfo: - validate(my_re, b"", "oops {}") - assert "oops {}" in str(excinfo.value) - - with pytest.raises(LocalProtocolError) as excinfo: - validate(my_re, b"", "oops {} xx", 10) - assert "oops 10 xx" in str(excinfo.value) - - -def test_make_sentinel() -> None: - class S(Sentinel, metaclass=Sentinel): - pass - - assert repr(S) == "S" - assert S == S - assert type(S).__name__ == "S" - assert S in {S} - assert type(S) is S - - class S2(Sentinel, metaclass=Sentinel): - pass - - assert repr(S2) == "S2" - assert S != S2 - assert S not in {S2} - assert type(S) is not type(S2) - - -def test_bytesify() -> None: - assert bytesify(b"123") == b"123" - assert bytesify(bytearray(b"123")) == b"123" - assert bytesify("123") == b"123" - - with pytest.raises(UnicodeEncodeError): - bytesify("\u1234") - - with pytest.raises(TypeError): - bytesify(10) diff --git a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/INSTALLER b/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/INSTALLER +++ /dev/null 
@@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/METADATA b/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/METADATA deleted file mode 100644 index 6be8e4b..0000000 --- a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/METADATA +++ /dev/null @@ -1,613 +0,0 @@ -Metadata-Version: 2.3 -Name: httpcore -Version: 1.0.6 -Summary: A minimal low-level HTTP client. -Project-URL: Documentation, https://www.encode.io/httpcore -Project-URL: Homepage, https://www.encode.io/httpcore/ -Project-URL: Source, https://github.com/encode/httpcore -Author-email: Tom Christie -License-Expression: BSD-3-Clause -License-File: LICENSE.md -Classifier: Development Status :: 3 - Alpha -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: Trio -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.8 -Requires-Dist: certifi -Requires-Dist: h11<0.15,>=0.13 -Provides-Extra: asyncio -Requires-Dist: anyio<5.0,>=4.0; extra == 'asyncio' -Provides-Extra: http2 -Requires-Dist: h2<5,>=3; extra == 'http2' -Provides-Extra: socks -Requires-Dist: socksio==1.*; extra == 'socks' -Provides-Extra: trio -Requires-Dist: trio<1.0,>=0.22.0; extra == 'trio' -Description-Content-Type: text/markdown - -# HTTP Core - -[![Test Suite](https://github.com/encode/httpcore/workflows/Test%20Suite/badge.svg)](https://github.com/encode/httpcore/actions) -[![Package version](https://badge.fury.io/py/httpcore.svg)](https://pypi.org/project/httpcore/) - -> *Do one thing, and do it well.* - -The HTTP Core package provides a minimal low-level HTTP client, which does -one thing only. Sending HTTP requests. - -It does not provide any high level model abstractions over the API, -does not handle redirects, multipart uploads, building authentication headers, -transparent HTTP caching, URL parsing, session cookie handling, -content or charset decoding, handling JSON, environment based configuration -defaults, or any of that Jazz. - -Some things HTTP Core does do: - -* Sending HTTP requests. -* Thread-safe / task-safe connection pooling. -* HTTP(S) proxy & SOCKS proxy support. -* Supports HTTP/1.1 and HTTP/2. -* Provides both sync and async interfaces. -* Async backend support for `asyncio` and `trio`. - -## Requirements - -Python 3.8+ - -## Installation - -For HTTP/1.1 only support, install with: - -```shell -$ pip install httpcore -``` - -There are also a number of optional extras available... - -```shell -$ pip install httpcore['asyncio,trio,http2,socks'] -``` - -## Sending requests - -Send an HTTP request: - -```python -import httpcore - -response = httpcore.request("GET", "https://www.example.com/") - -print(response) -# -print(response.status) -# 200 -print(response.headers) -# [(b'Accept-Ranges', b'bytes'), (b'Age', b'557328'), (b'Cache-Control', b'max-age=604800'), ...] -print(response.content) -# b'\n\n\nExample Domain\n\n\n ...' -``` - -The top-level `httpcore.request()` function is provided for convenience. 
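-For error handling, a minimal sketch (one plausible pattern, not the package's documented idiom) using exception classes that `httpcore` exports:
-
-```python
-import httpcore
-
-try:
-    response = httpcore.request("GET", "https://www.example.com/")
-    print(response.status)
-except httpcore.ConnectError:
-    # The TCP or TLS connection could not be established.
-    print("connection failed")
-except httpcore.TimeoutException:
-    # Base class for connect/read/write/pool timeouts.
-    print("timed out")
-```
-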
In practice whenever you're working with `httpcore` you'll want to use the connection pooling functionality that it provides.
-
-```python
-import httpcore
-
-http = httpcore.ConnectionPool()
-response = http.request("GET", "https://www.example.com/")
-```
-
-Once you're ready to get going, [head over to the documentation](https://www.encode.io/httpcore/).
-
-## Motivation
-
-You *probably* don't want to be using HTTP Core directly. It might make sense if
-you're writing something like a proxy service in Python, and you just want
-something at the lowest possible level, but more typically you'll want to use
-a higher level client library, such as `httpx`.
-
-The motivation for `httpcore` is:
-
-* To provide a reusable low-level client library, that other packages can then build on top of.
-* To provide a *really clear interface split* between the networking code and client logic,
-  so that each is easier to understand and reason about in isolation.
-
-## Dependencies
-
-The `httpcore` package has the following dependencies...
-
-* `h11`
-* `certifi`
-
-And the following optional extras...
-
-* `anyio` - Required by `pip install httpcore['asyncio']`.
-* `trio` - Required by `pip install httpcore['trio']`.
-* `h2` - Required by `pip install httpcore['http2']`.
-* `socksio` - Required by `pip install httpcore['socks']`.
-
-## Versioning
-
-We use [SEMVER for our versioning policy](https://semver.org/).
-
-For changes between package versions please see our [project changelog](CHANGELOG.md).
-
-We recommend pinning your requirements to either the most current major version, or a more specific version range:
-
-```shell
-pip install 'httpcore==1.*'
-```
-# Changelog
-
-All notable changes to this project will be documented in this file.
-
-The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
-
-## Version 1.0.6 (October 1st, 2024)
-
-- Relax `trio` dependency pinning. (#956)
-- Handle `trio` raising `NotImplementedError` on unsupported platforms. (#955)
-- Handle mapping `ssl.SSLError` to `httpcore.ConnectError`. (#918)
-
-## 1.0.5 (March 27th, 2024)
-
-- Handle `EndOfStream` exception for anyio backend. (#899)
-- Allow trio `0.25.*` series in package dependencies. (#903)
-
-## 1.0.4 (February 21st, 2024)
-
-- Add `target` request extension. (#888)
-- Fix support for connection `Upgrade` and `CONNECT` when some data in the stream has been read. (#882)
-
-## 1.0.3 (February 13th, 2024)
-
-- Fix support for async cancellations. (#880)
-- Fix trace extension when used with socks proxy. (#849)
-- Fix SSL context for connections using the "wss" scheme (#869)
-
-## 1.0.2 (November 10th, 2023)
-
-- Fix `float("inf")` timeouts in `Event.wait` function. (#846)
-
-## 1.0.1 (November 3rd, 2023)
-
-- Fix pool timeout to account for the total time spent retrying. (#823)
-- Raise a neater RuntimeError when the correct async deps are not installed. (#826)
-- Add support for synchronous TLS-in-TLS streams. (#840)
-
-## 1.0.0 (October 6th, 2023)
-
-From version 1.0 our async support is now optional, as the package has minimal dependencies by default.
-
-For async support use either `pip install 'httpcore[asyncio]'` or `pip install 'httpcore[trio]'`.
-
-The project versioning policy is now explicitly governed by SEMVER. See https://semver.org/.
-
-- Async support becomes fully optional. (#809)
-- Add support for Python 3.12. (#807)
-
-## 0.18.0 (September 8th, 2023)
-
-- Add support for HTTPS proxies. (#745, #786)
-- Drop Python 3.7 support. (#727)
-- Handle `sni_hostname` extension with SOCKS proxy. (#774)
-- Handle HTTP/1.1 half-closed connections gracefully. (#641)
-- Change the type of `Extensions` from `Mapping[str, Any]` to `MutableMapping[str, Any]`. (#762)
-
-## 0.17.3 (July 5th, 2023)
-
-- Support async cancellations, ensuring that the connection pool is left in a clean state when cancellations occur. (#726)
-- The networking backend interface has [been added to the public API](https://www.encode.io/httpcore/network-backends). Some classes which were previously private implementation details are now part of the top-level public API. (#699)
-- Graceful handling of HTTP/2 GoAway frames, with requests being transparently retried on a new connection. (#730)
-- Add exceptions when a synchronous `trace callback` is passed to an asynchronous request or an asynchronous `trace callback` is passed to a synchronous request. (#717)
-- Drop Python 3.7 support. (#727)
-
-## 0.17.2 (May 23rd, 2023)
-
-- Add `socket_options` argument to `ConnectionPool` and `HTTPProxy` classes. (#668)
-- Improve logging with per-module logger names. (#690)
-- Add `sni_hostname` request extension. (#696)
-- Resolve race condition during import of `anyio` package. (#692)
-- Enable TCP_NODELAY for all synchronous sockets. (#651)
-
-## 0.17.1 (May 17th, 2023)
-
-- If 'retries' is set, then allow retries if an SSL handshake error occurs. (#669)
-- Improve correctness of tracebacks on network exceptions, by raising properly chained exceptions. (#678)
-- Prevent connection-hanging behaviour when HTTP/2 connections are closed by a server-sent 'GoAway' frame. (#679)
-- Fix edge-case exception when removing requests from the connection pool. (#680)
-- Fix pool timeout edge-case. (#688)
-
-## 0.17.0 (March 16th, 2023)
-
-- Add DEBUG level logging. (#648)
-- Respect HTTP/2 max concurrent streams when settings updates are sent by server. (#652)
-- Increase the allowable HTTP header size to 100kB. (#647)
-- Add `retries` option to SOCKS proxy classes. (#643)
-
-## 0.16.3 (December 20th, 2022)
-
-- Allow `ws` and `wss` schemes. Allows us to properly support websocket upgrade connections. (#625)
-- Forwarding HTTP proxies use a connection-per-remote-host. Required by some proxy implementations. (#637)
-- Don't raise `RuntimeError` when closing a connection pool with active connections. Removes some error cases when cancellations are used. (#631)
-- Lazy import `anyio`, so that it's no longer a hard dependency, and isn't imported if unused. (#639)
-
-## 0.16.2 (November 25th, 2022)
-
-- Revert 'Fix async cancellation behaviour', which introduced race conditions. (#627)
-- Raise `RuntimeError` if attempting to use UNIX domain sockets on Windows. (#619)
-
-## 0.16.1 (November 17th, 2022)
-
-- Fix HTTP/1.1 interim informational responses, such as "100 Continue". (#605)
-
-## 0.16.0 (October 11th, 2022)
-
-- Support HTTP/1.1 informational responses. (#581)
-- Fix async cancellation behaviour. (#580)
-- Support `h11` 0.14. (#579)
-
-## 0.15.0 (May 17th, 2022)
-
-- Drop Python 3.6 support (#535)
-- Ensure HTTP proxy CONNECT requests include `timeout` configuration. (#506)
-- Switch to explicit `typing.Optional` for type hints. (#513)
-- For `trio` map OSError exceptions to `ConnectError`. (#543)
-
-## 0.14.7 (February 4th, 2022)
-
-- Requests which raise a PoolTimeout need to be removed from the pool queue. (#502)
-- Fix AttributeError that happened when Socks5Connection instances were terminated. (#501)
-
-## 0.14.6 (February 1st, 2022)
-
-- Fix SOCKS support for `http://` URLs. (#492)
-- Resolve race condition around exceptions during streaming a response. (#491)
-
-## 0.14.5 (January 18th, 2022)
-
-- SOCKS proxy support. (#478)
-- Add proxy_auth argument to HTTPProxy. (#481)
-- Improve error message on 'RemoteProtocolError' exception when server disconnects without sending a response. (#479)
-
-## 0.14.4 (January 5th, 2022)
-
-- Support HTTP/2 on HTTPS tunnelling proxies. (#468)
-- Fix proxy headers missing on HTTP forwarding. (#456)
-- Only instantiate SSL context if required. (#457)
-- More robust HTTP/2 handling. (#253, #439, #440, #441)
-
-## 0.14.3 (November 17th, 2021)
-
-- Fix race condition when removing closed connections from the pool. (#437)
-
-## 0.14.2 (November 16th, 2021)
-
-- Failed connections no longer remain in the pool. (Pull #433)
-
-## 0.14.1 (November 12th, 2021)
-
-- `max_connections` becomes optional. (Pull #429)
-- `certifi` is now included in the install dependencies. (Pull #428)
-- `h2` is now strictly optional. (Pull #428)
-
-## 0.14.0 (November 11th, 2021)
-
-The 0.14 release is a complete reworking of `httpcore`, comprehensively addressing some underlying issues in the connection pooling, as well as substantially redesigning the API to be more user friendly.
-
-Some of the lower-level API design also makes the components more easily testable in isolation, and the package now has 100% test coverage.
-
-See [discussion #419](https://github.com/encode/httpcore/discussions/419) for a little more background.
-
-There are some other neat bits in there too, such as the "trace" extension, which gives a hook into inspecting the internal events that occur during the request/response cycle. This extension is needed for the HTTPX CLI, in order to...
-
-* Log the point at which the connection is established, and the IP/port on which it is made.
-* Determine if the outgoing request should log as HTTP/1.1 or HTTP/2, rather than having to assume it's HTTP/2 if the --http2 flag was passed. (Which may not actually be true.)
-* Log SSL version info / certificate info.
-
-Note that `curio` support is not currently available in 0.14.0. If you're using `httpcore` with `curio` please get in touch, so we can assess if we ought to prioritize it as a feature or not.
-
-## 0.13.7 (September 13th, 2021)
-
-- Fix broken error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #403)
-
-## 0.13.6 (June 15th, 2021)
-
-### Fixed
-
-- Close sockets when read or write timeouts occur. (Pull #365)
-
-## 0.13.5 (June 14th, 2021)
-
-### Fixed
-
-- Resolved niggles with AnyIO EOF behaviours. (Pull #358, #362)
-
-## 0.13.4 (June 9th, 2021)
-
-### Added
-
-- Improved error messaging when URL scheme is missing, or a non HTTP(S) scheme is used. (Pull #354)
-
-### Fixed
-
-- Switched to `anyio` as the default backend implementation when running with `asyncio`. Resolves some awkward [TLS timeout issues](https://github.com/encode/httpx/discussions/1511).
-
-## 0.13.3 (May 6th, 2021)
-
-### Added
-
-- Support HTTP/2 prior knowledge, using `httpcore.SyncConnectionPool(http1=False)`. (Pull #333)
-
-### Fixed
-
-- Handle cases where environment does not provide `select.poll` support. (Pull #331)
-
-## 0.13.2 (April 29th, 2021)
-
-### Added
-
-- Improve error message for specific case of `RemoteProtocolError` where server disconnects without sending a response. (Pull #313)
-
-## 0.13.1 (April 28th, 2021)
-
-### Fixed
-
-- More resilient testing for closed connections. (Pull #311)
-- Don't raise exceptions on ungraceful connection closes. (Pull #310)
-
-## 0.13.0 (April 21st, 2021)
-
-The 0.13 release updates the core API in order to match the HTTPX Transport API,
-introduced in HTTPX 0.18 onwards.
-
-An example of making requests with the new interface is:
-
-```python
-with httpcore.SyncConnectionPool() as http:
-    status_code, headers, stream, extensions = http.handle_request(
-        method=b'GET',
-        url=(b'https', b'example.org', 443, b'/'),
-        headers=[(b'host', b'example.org'), (b'user-agent', b'httpcore')],
-        stream=httpcore.ByteStream(b''),
-        extensions={}
-    )
-    body = stream.read()
-    print(status_code, body)
-```
-
-### Changed
-
-- The `.request()` method is now `handle_request()`. (Pull #296)
-- The `.arequest()` method is now `.handle_async_request()`. (Pull #296)
-- The `headers` argument is no longer optional. (Pull #296)
-- The `stream` argument is no longer optional. (Pull #296)
-- The `ext` argument is now named `extensions`, and is no longer optional. (Pull #296)
-- The `"reason"` extension keyword is now named `"reason_phrase"`. (Pull #296)
-- The `"reason_phrase"` and `"http_version"` extensions now use byte strings for their values. (Pull #296)
-- The `httpcore.PlainByteStream()` class becomes `httpcore.ByteStream()`. (Pull #296)
-
-### Added
-
-- Streams now support a `.read()` interface. (Pull #296)
-
-### Fixed
-
-- Task cancellation no longer leaks connections from the connection pool. (Pull #305)
-
-## 0.12.3 (December 7th, 2020)
-
-### Fixed
-
-- Abort SSL connections on close rather than waiting for remote EOF when using `asyncio`. (Pull #167)
-- Fix exception raised in case of connect timeouts when using the `anyio` backend. (Pull #236)
-- Fix `Host` header precedence for `:authority` in HTTP/2. (Pull #241, #243)
-- Handle extra edge case when detecting for socket readability when using `asyncio`. (Pull #242, #244)
-- Fix `asyncio` SSL warning when using proxy tunneling. (Pull #249)
-
-## 0.12.2 (November 20th, 2020)
-
-### Fixed
-
-- Properly wrap connect errors on the asyncio backend. (Pull #235)
-- Fix `ImportError` occurring on Python 3.9 when using the HTTP/1.1 sync client in a multithreaded context. (Pull #237)
-
-## 0.12.1 (November 7th, 2020)
-
-### Added
-
-- Add connect retries. (Pull #221)
-
-### Fixed
-
-- Tweak detection of dropped connections, resolving an issue with open files limits on Linux. (Pull #185)
-- Avoid leaking connections when establishing an HTTP tunnel to a proxy has failed. (Pull #223)
-- Properly wrap OS errors when using `trio`. (Pull #225)
-
-## 0.12.0 (October 6th, 2020)
-
-### Changed
-
-- HTTP header casing is now preserved, rather than always sent in lowercase. (#216 and python-hyper/h11#104)
-
-### Added
-
-- Add Python 3.9 to officially supported versions.
-
-### Fixed
-
-- Gracefully handle a stdlib asyncio bug when a connection is closed while it is in a paused-for-reading state. (#201)
-
-## 0.11.1 (September 28th, 2020)
-
-### Fixed
-
-- Add await to async semaphore release() coroutine (#197)
-- Drop incorrect curio classifier (#192)
-
-## 0.11.0 (September 22nd, 2020)
-
-The Transport API with 0.11.0 has a couple of significant changes.
-
-Firstly, we've changed the request interface in order to allow extensions, which will later enable us to support features
-such as trailing headers, HTTP/2 server push, and CONNECT/Upgrade connections.
-
-The interface changes from:
-
-```python
-def request(method, url, headers, stream, timeout):
-    return (http_version, status_code, reason, headers, stream)
-```
-
-To instead include an optional dictionary of extensions on the request and response:
-
-```python
-def request(method, url, headers, stream, ext):
-    return (status_code, headers, stream, ext)
-```
-
-Having an open-ended extensions point will allow us to later add support for various optional features that wouldn't otherwise be supported without these API changes.
-
-In particular:
-
-* Trailing headers support.
-* HTTP/2 Server Push
-* sendfile.
-* Exposing raw connection on CONNECT, Upgrade, HTTP/2 bi-di streaming.
-* Exposing debug information out of the API, including template name, template context.
-
-Currently extensions are limited to:
-
-* request: `timeout` - Optional. Timeout dictionary.
-* response: `http_version` - Optional. Include the HTTP version used on the response.
-* response: `reason` - Optional. Include the reason phrase used on the response. Only valid with HTTP/1.*.
-
-See https://github.com/encode/httpx/issues/1274#issuecomment-694884553 for the history behind this.
-
-Secondly, the async version of `request` is now namespaced as `arequest`.
-
-This allows concrete transports to support both sync and async implementations on the same class.
-
-### Added
-
-- Add curio support. (Pull #168)
-- Add anyio support, with `backend="anyio"`. (Pull #169)
-
-### Changed
-
-- Update the Transport API to use 'ext' for optional extensions. (Pull #190)
-- Update the Transport API to use `.request` and `.arequest` so implementations can support both sync and async. (Pull #189)
-
-## 0.10.2 (August 20th, 2020)
-
-### Added
-
-- Added Unix Domain Socket support. (Pull #139)
-
-### Fixed
-
-- Always include the port on proxy CONNECT requests. (Pull #154)
-- Fix `max_keepalive_connections` configuration. (Pull #153)
-- Fixes behaviour in HTTP/1.1 where server disconnects can be used to signal the end of the response body. (Pull #164)
-
-## 0.10.1 (August 7th, 2020)
-
-- Include `max_keepalive_connections` on `AsyncHTTPProxy`/`SyncHTTPProxy` classes.
-
-## 0.10.0 (August 7th, 2020)
-
-The most notable change in the 0.10.0 release is that HTTP/2 support is now fully optional.
-
-Use either `pip install httpcore` for HTTP/1.1 support only, or `pip install httpcore[http2]` for HTTP/1.1 and HTTP/2 support.
-
-### Added
-
-- HTTP/2 support becomes optional. (Pull #121, #130)
-- Add `local_address=...` support. (Pull #100, #134)
-- Add `PlainByteStream`, `IteratorByteStream`, `AsyncIteratorByteStream`. The `AsyncByteStream` and `SyncByteStream` classes are now pure interface classes. (#133)
-- Add `LocalProtocolError`, `RemoteProtocolError` exceptions. (Pull #129)
-- Add `UnsupportedProtocol` exception. (Pull #128)
-- Add `.get_connection_info()` method. (Pull #102, #137)
-- Add better TRACE logs. (Pull #101)
-
-### Changed
-
-- `max_keepalive` is deprecated in favour of `max_keepalive_connections`. (Pull #140)
-
-### Fixed
-
-- Improve handling of server disconnects. (Pull #112)
-
-## 0.9.1 (May 27th, 2020)
-
-### Fixed
-
-- Proper host resolution for sync case, including IPv6 support. (Pull #97)
-- Close outstanding connections when connection pool is closed. (Pull #98)
-
-## 0.9.0 (May 21st, 2020)
-
-### Changed
-
-- URL port becomes an `Optional[int]` instead of `int`. (Pull #92)
-
-### Fixed
-
-- Honor HTTP/2 max concurrent streams settings. (Pull #89, #90)
-- Remove incorrect debug log.
(Pull #83) - -## 0.8.4 (May 11th, 2020) - -### Added - -- Logging via HTTPCORE_LOG_LEVEL and HTTPX_LOG_LEVEL environment variables -and TRACE level logging. (Pull #79) - -### Fixed - -- Reuse of connections on HTTP/2 in close concurrency situations. (Pull #81) - -## 0.8.3 (May 6rd, 2020) - -### Fixed - -- Include `Host` and `Accept` headers on proxy "CONNECT" requests. -- De-duplicate any headers also contained in proxy_headers. -- HTTP/2 flag not being passed down to proxy connections. - -## 0.8.2 (May 3rd, 2020) - -### Fixed - -- Fix connections using proxy forwarding requests not being added to the -connection pool properly. (Pull #70) - -## 0.8.1 (April 30th, 2020) - -### Changed - -- Allow inherintance of both `httpcore.AsyncByteStream`, `httpcore.SyncByteStream` without type conflicts. - -## 0.8.0 (April 30th, 2020) - -### Fixed - -- Fixed tunnel proxy support. - -### Added - -- New `TimeoutException` base class. - -## 0.7.0 (March 5th, 2020) - -- First integration with HTTPX. diff --git a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/RECORD b/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/RECORD deleted file mode 100644 index 6f22eef..0000000 --- a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/RECORD +++ /dev/null @@ -1,68 +0,0 @@ -httpcore-1.0.6.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -httpcore-1.0.6.dist-info/METADATA,sha256=JUDMSknnJJYMim2rfax8xyCGZv82-XKHBhD1yvT6Tfg,21194 -httpcore-1.0.6.dist-info/RECORD,, -httpcore-1.0.6.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 -httpcore-1.0.6.dist-info/licenses/LICENSE.md,sha256=_ctZFUx0y6uhahEkL3dAvqnyPW_rVUeRfYxflKgDkqU,1518 -httpcore/__init__.py,sha256=3SEBbvCe1JleHxuWH9HM57SqKLs8Bp17UjahHiLPPJU,3337 -httpcore/__pycache__/__init__.cpython-312.pyc,, -httpcore/__pycache__/_api.cpython-312.pyc,, -httpcore/__pycache__/_exceptions.cpython-312.pyc,, -httpcore/__pycache__/_models.cpython-312.pyc,, -httpcore/__pycache__/_ssl.cpython-312.pyc,, -httpcore/__pycache__/_synchronization.cpython-312.pyc,, -httpcore/__pycache__/_trace.cpython-312.pyc,, -httpcore/__pycache__/_utils.cpython-312.pyc,, -httpcore/_api.py,sha256=IBR18qZQ8ETcghJXC1Gd-30WuKYRS0EyF2eC80_OBQ8,3167 -httpcore/_async/__init__.py,sha256=EWdl2v4thnAHzJpqjU4h2a8DUiGAvNiWrkii9pfhTf0,1221 -httpcore/_async/__pycache__/__init__.cpython-312.pyc,, -httpcore/_async/__pycache__/connection.cpython-312.pyc,, -httpcore/_async/__pycache__/connection_pool.cpython-312.pyc,, -httpcore/_async/__pycache__/http11.cpython-312.pyc,, -httpcore/_async/__pycache__/http2.cpython-312.pyc,, -httpcore/_async/__pycache__/http_proxy.cpython-312.pyc,, -httpcore/_async/__pycache__/interfaces.cpython-312.pyc,, -httpcore/_async/__pycache__/socks_proxy.cpython-312.pyc,, -httpcore/_async/connection.py,sha256=63vgzLIgX3bjq-RsjK68UWS_DWNkdvnKP0vJRK3Prfs,8484 -httpcore/_async/connection_pool.py,sha256=9wEoWJpmn7F4mBRPrSEkTCIrzpYs0HoFIE1s1fRmQT8,15612 -httpcore/_async/http11.py,sha256=yvohHUXwdJv9gN-dEJv4C5F8_NiyOtcflua1Q3BRjew,13978 -httpcore/_async/http2.py,sha256=_AgUDRcjAIlbncbOjW0I-iqYN1PDgRcFUIGfzZ2fKcI,23881 -httpcore/_async/http_proxy.py,sha256=hl4t-PahlAuCGtKNYRx4LSgjx1ZuspE9oDBaL6BOess,14851 -httpcore/_async/interfaces.py,sha256=J2iq9rs7x3nKS6iCfntjHY0Woast6V_HuXuE8rs3HmA,4486 -httpcore/_async/socks_proxy.py,sha256=T8y927RATyy4A9GMduRVUh13ZeRq8Ts8JP24bFVQ6n8,13934 -httpcore/_backends/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -httpcore/_backends/__pycache__/__init__.cpython-312.pyc,, 
-httpcore/_backends/__pycache__/anyio.cpython-312.pyc,, -httpcore/_backends/__pycache__/auto.cpython-312.pyc,, -httpcore/_backends/__pycache__/base.cpython-312.pyc,, -httpcore/_backends/__pycache__/mock.cpython-312.pyc,, -httpcore/_backends/__pycache__/sync.cpython-312.pyc,, -httpcore/_backends/__pycache__/trio.cpython-312.pyc,, -httpcore/_backends/anyio.py,sha256=LJz63ykFwi6efvflJbJAoEHyEY7zLF4wErPLWFilV2U,5334 -httpcore/_backends/auto.py,sha256=Q_iQjNuwJseqBxeYJYtiaGzFs08_LGI3K_egYrixEqE,1683 -httpcore/_backends/base.py,sha256=Qsb8b_PSiVP1ldHHGXHxQzJ1Qlzj2r8KR9KQeANkSbE,3218 -httpcore/_backends/mock.py,sha256=S4IADhC6kE22ge_jR_WHlEUkD6QAsXnwz26DSWZLcG4,4179 -httpcore/_backends/sync.py,sha256=LAomvc-MAlot5-S9CCFxnr561aDp9yhyfs_65WeCkZ4,8086 -httpcore/_backends/trio.py,sha256=INOeHEkA8pO6AsSqjColWcayM0FQSyGi1hpaQghjrCs,6078 -httpcore/_exceptions.py,sha256=7zb3KNiG0qmfUNIdFgdaUSbn2Pu3oztghi6Vg7i-LJU,1185 -httpcore/_models.py,sha256=7DlYrkyc2z-orQrnztCUmtBY4gLMz18FjPP9e5Q-fFg,16614 -httpcore/_ssl.py,sha256=srqmSNU4iOUvWF-SrJvb8G_YEbHFELOXQOwdDIBTS9c,187 -httpcore/_sync/__init__.py,sha256=JBDIgXt5la1LCJ1sLQeKhjKFpLnpNr8Svs6z2ni3fgg,1141 -httpcore/_sync/__pycache__/__init__.cpython-312.pyc,, -httpcore/_sync/__pycache__/connection.cpython-312.pyc,, -httpcore/_sync/__pycache__/connection_pool.cpython-312.pyc,, -httpcore/_sync/__pycache__/http11.cpython-312.pyc,, -httpcore/_sync/__pycache__/http2.cpython-312.pyc,, -httpcore/_sync/__pycache__/http_proxy.cpython-312.pyc,, -httpcore/_sync/__pycache__/interfaces.cpython-312.pyc,, -httpcore/_sync/__pycache__/socks_proxy.cpython-312.pyc,, -httpcore/_sync/connection.py,sha256=n7YFLjYsRv4cf0CXEIqNsUqR_NPNvFQN8dGqjj0mv9U,8273 -httpcore/_sync/connection_pool.py,sha256=kU_4Zp4z4e2tRSU_p8Q9KximA_TMO7fJZ4VYz0C2SlQ,15280 -httpcore/_sync/http11.py,sha256=9-IgEawTTbkHuOE8O3LODhp3KCJ4tAo5vmyA4UE66pU,13564 -httpcore/_sync/http2.py,sha256=_fPbMtCAVqGXKFYo3OmNNkucDuVTF69vMEbSFE2Jodo,23345 -httpcore/_sync/http_proxy.py,sha256=82oin8vjt2a7YmmVvz7sXEZSBuajK-mHDF-EwnR_pJ0,14613 -httpcore/_sync/interfaces.py,sha256=EM4PTf-rgkclzisFcrTyx1G8FwraoffE8rbckOznX_o,4365 -httpcore/_sync/socks_proxy.py,sha256=T13QSceeEAg1PM9Yh7Nk-DoqI28TIUqDS-9O3OSC9Uc,13707 -httpcore/_synchronization.py,sha256=iivvB5KJZRTATEKbDr7xe8oo9bHVR42slsDTEiQvBjI,9487 -httpcore/_trace.py,sha256=akf5PsWVq3rZjqmXniomU59OY37K7JHoeNDCQ4GU84E,3954 -httpcore/_utils.py,sha256=9QPh5ib4JilWX4dBCC_XO6wdBY4b0kbUGgfV3QfBANc,1525 -httpcore/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/WHEEL b/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/WHEEL deleted file mode 100644 index cdd68a4..0000000 --- a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.25.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/licenses/LICENSE.md b/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/licenses/LICENSE.md deleted file mode 100644 index 311b2b5..0000000 --- a/myenv/Lib/site-packages/httpcore-1.0.6.dist-info/licenses/LICENSE.md +++ /dev/null @@ -1,27 +0,0 @@ -Copyright © 2020, [Encode OSS Ltd](https://www.encode.io/). -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. 
- -* Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/Lib/site-packages/httpcore/__init__.py b/myenv/Lib/site-packages/httpcore/__init__.py deleted file mode 100644 index 330745a..0000000 --- a/myenv/Lib/site-packages/httpcore/__init__.py +++ /dev/null @@ -1,139 +0,0 @@ -from ._api import request, stream -from ._async import ( - AsyncConnectionInterface, - AsyncConnectionPool, - AsyncHTTP2Connection, - AsyncHTTP11Connection, - AsyncHTTPConnection, - AsyncHTTPProxy, - AsyncSOCKSProxy, -) -from ._backends.base import ( - SOCKET_OPTION, - AsyncNetworkBackend, - AsyncNetworkStream, - NetworkBackend, - NetworkStream, -) -from ._backends.mock import AsyncMockBackend, AsyncMockStream, MockBackend, MockStream -from ._backends.sync import SyncBackend -from ._exceptions import ( - ConnectError, - ConnectionNotAvailable, - ConnectTimeout, - LocalProtocolError, - NetworkError, - PoolTimeout, - ProtocolError, - ProxyError, - ReadError, - ReadTimeout, - RemoteProtocolError, - TimeoutException, - UnsupportedProtocol, - WriteError, - WriteTimeout, -) -from ._models import URL, Origin, Request, Response -from ._ssl import default_ssl_context -from ._sync import ( - ConnectionInterface, - ConnectionPool, - HTTP2Connection, - HTTP11Connection, - HTTPConnection, - HTTPProxy, - SOCKSProxy, -) - -# The 'httpcore.AnyIOBackend' class is conditional on 'anyio' being installed. -try: - from ._backends.anyio import AnyIOBackend -except ImportError: # pragma: nocover - - class AnyIOBackend: # type: ignore - def __init__(self, *args, **kwargs): # type: ignore - msg = ( - "Attempted to use 'httpcore.AnyIOBackend' but 'anyio' is not installed." - ) - raise RuntimeError(msg) - - -# The 'httpcore.TrioBackend' class is conditional on 'trio' being installed. -try: - from ._backends.trio import TrioBackend -except ImportError: # pragma: nocover - - class TrioBackend: # type: ignore - def __init__(self, *args, **kwargs): # type: ignore - msg = "Attempted to use 'httpcore.TrioBackend' but 'trio' is not installed." 
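-            # Deferring the failure from import time to instantiation keeps
-            # `import httpcore` working when the optional 'trio' extra is absent.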
- raise RuntimeError(msg) - - -__all__ = [ - # top-level requests - "request", - "stream", - # models - "Origin", - "URL", - "Request", - "Response", - # async - "AsyncHTTPConnection", - "AsyncConnectionPool", - "AsyncHTTPProxy", - "AsyncHTTP11Connection", - "AsyncHTTP2Connection", - "AsyncConnectionInterface", - "AsyncSOCKSProxy", - # sync - "HTTPConnection", - "ConnectionPool", - "HTTPProxy", - "HTTP11Connection", - "HTTP2Connection", - "ConnectionInterface", - "SOCKSProxy", - # network backends, implementations - "SyncBackend", - "AnyIOBackend", - "TrioBackend", - # network backends, mock implementations - "AsyncMockBackend", - "AsyncMockStream", - "MockBackend", - "MockStream", - # network backends, interface - "AsyncNetworkStream", - "AsyncNetworkBackend", - "NetworkStream", - "NetworkBackend", - # util - "default_ssl_context", - "SOCKET_OPTION", - # exceptions - "ConnectionNotAvailable", - "ProxyError", - "ProtocolError", - "LocalProtocolError", - "RemoteProtocolError", - "UnsupportedProtocol", - "TimeoutException", - "PoolTimeout", - "ConnectTimeout", - "ReadTimeout", - "WriteTimeout", - "NetworkError", - "ConnectError", - "ReadError", - "WriteError", -] - -__version__ = "1.0.6" - - -__locals = locals() -for __name in __all__: - if not __name.startswith("__"): - setattr(__locals[__name], "__module__", "httpcore") # noqa diff --git a/myenv/Lib/site-packages/httpcore/_api.py b/myenv/Lib/site-packages/httpcore/_api.py deleted file mode 100644 index 854235f..0000000 --- a/myenv/Lib/site-packages/httpcore/_api.py +++ /dev/null @@ -1,92 +0,0 @@ -from contextlib import contextmanager -from typing import Iterator, Optional, Union - -from ._models import URL, Extensions, HeaderTypes, Response -from ._sync.connection_pool import ConnectionPool - - -def request( - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterator[bytes], None] = None, - extensions: Optional[Extensions] = None, -) -> Response: - """ - Sends an HTTP request, returning the response. - - ``` - response = httpcore.request("GET", "https://www.example.com/") - ``` - - Arguments: - method: The HTTP method for the request. Typically one of `"GET"`, - `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. - url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, - or as str/bytes. - headers: The HTTP request headers. Either as a dictionary of str/bytes, - or as a list of two-tuples of str/bytes. - content: The content of the request body. Either as bytes, - or as a bytes iterator. - extensions: A dictionary of optional extra information included on the request. - Possible keys include `"timeout"`. - - Returns: - An instance of `httpcore.Response`. - """ - with ConnectionPool() as pool: - return pool.request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - - -@contextmanager -def stream( - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterator[bytes], None] = None, - extensions: Optional[Extensions] = None, -) -> Iterator[Response]: - """ - Sends an HTTP request, returning the response within a content manager. - - ``` - with httpcore.stream("GET", "https://www.example.com/") as response: - ... - ``` - - When using the `stream()` function, the body of the response will not be - automatically read. 
If you want to access the response body you should - either use `content = response.read()`, or `for chunk in response.iter_content()`. - - Arguments: - method: The HTTP method for the request. Typically one of `"GET"`, - `"OPTIONS"`, `"HEAD"`, `"POST"`, `"PUT"`, `"PATCH"`, or `"DELETE"`. - url: The URL of the HTTP request. Either as an instance of `httpcore.URL`, - or as str/bytes. - headers: The HTTP request headers. Either as a dictionary of str/bytes, - or as a list of two-tuples of str/bytes. - content: The content of the request body. Either as bytes, - or as a bytes iterator. - extensions: A dictionary of optional extra information included on the request. - Possible keys include `"timeout"`. - - Returns: - An instance of `httpcore.Response`. - """ - with ConnectionPool() as pool: - with pool.stream( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) as response: - yield response diff --git a/myenv/Lib/site-packages/httpcore/_async/__init__.py b/myenv/Lib/site-packages/httpcore/_async/__init__.py deleted file mode 100644 index 88dc7f0..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -from .connection import AsyncHTTPConnection -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .http_proxy import AsyncHTTPProxy -from .interfaces import AsyncConnectionInterface - -try: - from .http2 import AsyncHTTP2Connection -except ImportError: # pragma: nocover - - class AsyncHTTP2Connection: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use http2 support, but the `h2` package is not " - "installed. Use 'pip install httpcore[http2]'." - ) - - -try: - from .socks_proxy import AsyncSOCKSProxy -except ImportError: # pragma: nocover - - class AsyncSOCKSProxy: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use SOCKS support, but the `socksio` package is not " - "installed. Use 'pip install httpcore[socks]'." - ) - - -__all__ = [ - "AsyncHTTPConnection", - "AsyncConnectionPool", - "AsyncHTTPProxy", - "AsyncHTTP11Connection", - "AsyncHTTP2Connection", - "AsyncConnectionInterface", - "AsyncSOCKSProxy", -] diff --git a/myenv/Lib/site-packages/httpcore/_async/connection.py b/myenv/Lib/site-packages/httpcore/_async/connection.py deleted file mode 100644 index 2f439cf..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/connection.py +++ /dev/null @@ -1,220 +0,0 @@ -import itertools -import logging -import ssl -from types import TracebackType -from typing import Iterable, Iterator, Optional, Type - -from .._backends.auto import AutoBackend -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream -from .._exceptions import ConnectError, ConnectTimeout -from .._models import Origin, Request, Response -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. - - -logger = logging.getLogger("httpcore.connection") - - -def exponential_backoff(factor: float) -> Iterator[float]: - """ - Generate a geometric sequence that has a ratio of 2 and starts with 0. 
- - For example: - - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` - """ - yield 0 - for n in itertools.count(): - yield factor * 2**n - - -class AsyncHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._origin = origin - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend: AsyncNetworkBackend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._connection: Optional[AsyncConnectionInterface] = None - self._connect_failed: bool = False - self._request_lock = AsyncLock() - self._socket_options = socket_options - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection to {self._origin}" - ) - - try: - async with self._request_lock: - if self._connection is None: - stream = await self._connect(request) - - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except BaseException as exc: - self._connect_failed = True - raise exc - - return await self._connection.handle_async_request(request) - - async def _connect(self, request: Request) -> AsyncNetworkStream: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - retries_left = self._retries - delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) - - while True: - try: - if self._uds is None: - kwargs = { - "host": self._origin.host.decode("ascii"), - "port": self._origin.port, - "local_address": self._local_address, - "timeout": timeout, - "socket_options": self._socket_options, - } - async with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = await self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - else: - kwargs = { - "path": self._uds, - "timeout": timeout, - "socket_options": self._socket_options, - } - async with Trace( - "connect_unix_socket", logger, request, kwargs - ) as trace: - stream = await self._network_backend.connect_unix_socket( - **kwargs - ) - trace.return_value = stream - - if self._origin.scheme in (b"https", b"wss"): - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or 
self._origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - return stream - except (ConnectError, ConnectTimeout): - if retries_left <= 0: - raise - retries_left -= 1 - delay = next(delays) - async with Trace("retry", logger, request, kwargs) as trace: - await self._network_backend.sleep(delay) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - async def aclose(self) -> None: - if self._connection is not None: - async with Trace("close", logger, None, {}): - await self._connection.aclose() - - def is_available(self) -> bool: - if self._connection is None: - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
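As the comment above notes, a connection instance can also be driven directly, outside of a pool. A minimal sketch of that usage, assuming the top-level `httpcore.Origin` and `httpcore.AsyncHTTPConnection` exports; the host is just an illustrative value:

```python
import asyncio
import httpcore

async def main() -> None:
    # A connection is scoped to a single origin (scheme, host, port).
    origin = httpcore.Origin(b"https", b"example.com", 443)
    async with httpcore.AsyncHTTPConnection(origin=origin) as connection:
        response = await connection.request("GET", "https://example.com/")
        print(response.status)

asyncio.run(main())
```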
- - async def __aenter__(self) -> "AsyncHTTPConnection": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - await self.aclose() diff --git a/myenv/Lib/site-packages/httpcore/_async/connection_pool.py b/myenv/Lib/site-packages/httpcore/_async/connection_pool.py deleted file mode 100644 index 214dfc4..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/connection_pool.py +++ /dev/null @@ -1,380 +0,0 @@ -import ssl -import sys -from types import TracebackType -from typing import AsyncIterable, AsyncIterator, Iterable, List, Optional, Type - -from .._backends.auto import AutoBackend -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend -from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol -from .._models import Origin, Request, Response -from .._synchronization import AsyncEvent, AsyncShieldCancellation, AsyncThreadLock -from .connection import AsyncHTTPConnection -from .interfaces import AsyncConnectionInterface, AsyncRequestInterface - - -class AsyncPoolRequest: - def __init__(self, request: Request) -> None: - self.request = request - self.connection: Optional[AsyncConnectionInterface] = None - self._connection_acquired = AsyncEvent() - - def assign_to_connection( - self, connection: Optional[AsyncConnectionInterface] - ) -> None: - self.connection = connection - self._connection_acquired.set() - - def clear_connection(self) -> None: - self.connection = None - self._connection_acquired = AsyncEvent() - - async def wait_for_connection( - self, timeout: Optional[float] = None - ) -> AsyncConnectionInterface: - if self.connection is None: - await self._connection_acquired.wait(timeout=timeout) - assert self.connection is not None - return self.connection - - def is_queued(self) -> bool: - return self.connection is None - - -class AsyncConnectionPool(AsyncRequestInterface): - """ - A connection pool for making HTTP requests. - """ - - def __init__( - self, - ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish a - connection. - local_address: Local address to connect from. 
Can also be used to connect - using a particular address family. Using `local_address="0.0.0.0"` - will connect using an `AF_INET` address (IPv4), while using - `local_address="::"` will connect using an `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. - socket_options: Socket options to set - on the TCP socket when the connection is established. - """ - self._ssl_context = ssl_context - - self._max_connections = ( - sys.maxsize if max_connections is None else max_connections - ) - self._max_keepalive_connections = ( - sys.maxsize - if max_keepalive_connections is None - else max_keepalive_connections - ) - self._max_keepalive_connections = min( - self._max_connections, self._max_keepalive_connections - ) - - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._socket_options = socket_options - - # The mutable state on a connection pool is the queue of incoming requests, - # and the set of connections that are servicing those requests. - self._connections: List[AsyncConnectionInterface] = [] - self._requests: List[AsyncPoolRequest] = [] - - # We only mutate the state of the connection pool within an 'optional_thread_lock' - # context. This holds a threading lock unless we're running in async mode, - # in which case it is a no-op. - self._optional_thread_lock = AsyncThreadLock() - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - return AsyncHTTPConnection( - origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - retries=self._retries, - local_address=self._local_address, - uds=self._uds, - network_backend=self._network_backend, - socket_options=self._socket_options, - ) - - @property - def connections(self) -> List[AsyncConnectionInterface]: - """ - Return a list of the connections currently in the pool. - - For example: - - ```python - >>> pool.connections - [ - <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>, - <AsyncHTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>, - <AsyncHTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>, - ] - ``` - """ - return list(self._connections) - - async def handle_async_request(self, request: Request) -> Response: - """ - Send an HTTP request, and return an HTTP response. - - This is the core implementation that is called into by `.request()` or `.stream()`. - """ - scheme = request.url.scheme.decode() - if scheme == "": - raise UnsupportedProtocol( - "Request URL is missing an 'http://' or 'https://' protocol." - ) - if scheme not in ("http", "https", "ws", "wss"): - raise UnsupportedProtocol( - f"Request URL has an unsupported protocol '{scheme}://'." - ) - - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("pool", None) - - with self._optional_thread_lock: - # Add the incoming request to our request queue. - pool_request = AsyncPoolRequest(request) - self._requests.append(pool_request) - - try: - while True: - with self._optional_thread_lock: - # Assign incoming requests to available connections, - # closing or creating new connections as required. - closing = self._assign_requests_to_connections() - await self._close_connections(closing) - - # Wait until this request has an assigned connection. - connection = await pool_request.wait_for_connection(timeout=timeout) - - try: - # Send the request on the assigned connection.
- response = await connection.handle_async_request( - pool_request.request - ) - except ConnectionNotAvailable: - # In some cases a connection may initially be available to - # handle a request, but then become unavailable. - # - # In this case we clear the connection and try again. - pool_request.clear_connection() - else: - break # pragma: nocover - - except BaseException as exc: - with self._optional_thread_lock: - # For any exception or cancellation we remove the request from - # the queue, and then re-assign requests to connections. - self._requests.remove(pool_request) - closing = self._assign_requests_to_connections() - - await self._close_connections(closing) - raise exc from None - - # Return the response. Note that in this case we still have to manage - # the point at which the response is closed. - assert isinstance(response.stream, AsyncIterable) - return Response( - status=response.status, - headers=response.headers, - content=PoolByteStream( - stream=response.stream, pool_request=pool_request, pool=self - ), - extensions=response.extensions, - ) - - def _assign_requests_to_connections(self) -> List[AsyncConnectionInterface]: - """ - Manage the state of the connection pool, assigning incoming - requests to connections as available. - - Called whenever a new request is added or removed from the pool. - - Any closing connections are returned, allowing the I/O for closing - those connections to be handled separately. - """ - closing_connections = [] - - # First we handle cleaning up any connections that are closed, - # have expired their keep-alive, or are surplus idle connections. - for connection in list(self._connections): - if connection.is_closed(): - # log: "removing closed connection" - self._connections.remove(connection) - elif connection.has_expired(): - # log: "closing expired connection" - self._connections.remove(connection) - closing_connections.append(connection) - elif ( - connection.is_idle() - and len([connection.is_idle() for connection in self._connections]) - > self._max_keepalive_connections - ): - # log: "closing idle connection" - self._connections.remove(connection) - closing_connections.append(connection) - - # Assign queued requests to connections. - queued_requests = [request for request in self._requests if request.is_queued()] - for pool_request in queued_requests: - origin = pool_request.request.url.origin - available_connections = [ - connection - for connection in self._connections - if connection.can_handle_request(origin) and connection.is_available() - ] - idle_connections = [ - connection for connection in self._connections if connection.is_idle() - ] - - # There are three cases for how we may be able to handle the request: - # - # 1. There is an existing connection that can handle the request. - # 2. We can create a new connection to handle the request. - # 3. We can close an idle connection and then create a new connection - # to handle the request.
- if available_connections: - # log: "reusing existing connection" - connection = available_connections[0] - pool_request.assign_to_connection(connection) - elif len(self._connections) < self._max_connections: - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - elif idle_connections: - # log: "closing idle connection" - connection = idle_connections[0] - self._connections.remove(connection) - closing_connections.append(connection) - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - - return closing_connections - - async def _close_connections(self, closing: List[AsyncConnectionInterface]) -> None: - # Close connections which have been removed from the pool. - with AsyncShieldCancellation(): - for connection in closing: - await connection.aclose() - - async def aclose(self) -> None: - # Explicitly close the connection pool. - # Clears all existing requests and connections. - with self._optional_thread_lock: - closing_connections = list(self._connections) - self._connections = [] - await self._close_connections(closing_connections) - - async def __aenter__(self) -> "AsyncConnectionPool": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - await self.aclose() - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - with self._optional_thread_lock: - request_is_queued = [request.is_queued() for request in self._requests] - connection_is_idle = [ - connection.is_idle() for connection in self._connections - ] - - num_active_requests = request_is_queued.count(False) - num_queued_requests = request_is_queued.count(True) - num_active_connections = connection_is_idle.count(False) - num_idle_connections = connection_is_idle.count(True) - - requests_info = ( - f"Requests: {num_active_requests} active, {num_queued_requests} queued" - ) - connection_info = ( - f"Connections: {num_active_connections} active, {num_idle_connections} idle" - ) - - return f"<{class_name} [{requests_info} | {connection_info}]>" - - -class PoolByteStream: - def __init__( - self, - stream: AsyncIterable[bytes], - pool_request: AsyncPoolRequest, - pool: AsyncConnectionPool, - ) -> None: - self._stream = stream - self._pool_request = pool_request - self._pool = pool - self._closed = False - - async def __aiter__(self) -> AsyncIterator[bytes]: - try: - async for part in self._stream: - yield part - except BaseException as exc: - await self.aclose() - raise exc from None - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - with AsyncShieldCancellation(): - if hasattr(self._stream, "aclose"): - await self._stream.aclose() - - with self._pool._optional_thread_lock: - self._pool._requests.remove(self._pool_request) - closing = self._pool._assign_requests_to_connections() - - await self._pool._close_connections(closing) diff --git a/myenv/Lib/site-packages/httpcore/_async/http11.py b/myenv/Lib/site-packages/httpcore/_async/http11.py deleted file mode 100644 index 0493a92..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/http11.py +++ /dev/null @@ -1,386 +0,0 @@ -import enum -import logging -import ssl -import time -from types import TracebackType -from typing import ( - Any, - AsyncIterable, - AsyncIterator, 
- List, - Optional, - Tuple, - Type, - Union, -) - -import h11 - -from .._backends.base import AsyncNetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, - WriteError, - map_exceptions, -) -from .._models import Origin, Request, Response -from .._synchronization import AsyncLock, AsyncShieldCancellation -from .._trace import Trace -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.http11") - - -# A subset of `h11.Event` types supported by `_send_event` -H11SendEvent = Union[ - h11.Request, - h11.Data, - h11.EndOfMessage, -] - - -class HTTPConnectionState(enum.IntEnum): - NEW = 0 - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class AsyncHTTP11Connection(AsyncConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 - - def __init__( - self, - origin: Origin, - stream: AsyncNetworkStream, - keepalive_expiry: Optional[float] = None, - ) -> None: - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: Optional[float] = keepalive_expiry - self._expire_at: Optional[float] = None - self._state = HTTPConnectionState.NEW - self._state_lock = AsyncLock() - self._request_count = 0 - self._h11_state = h11.Connection( - our_role=h11.CLIENT, - max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, - ) - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - async with self._state_lock: - if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): - self._request_count += 1 - self._state = HTTPConnectionState.ACTIVE - self._expire_at = None - else: - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request} - try: - async with Trace( - "send_request_headers", logger, request, kwargs - ) as trace: - await self._send_request_headers(**kwargs) - async with Trace("send_request_body", logger, request, kwargs) as trace: - await self._send_request_body(**kwargs) - except WriteError: - # If we get a write error while we're writing the request, - # then we suppress this error and move on to attempting to - # read the response. Servers can sometimes close the request - # pre-emptively and then respond with a well-formed HTTP - # error response. - pass - - async with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - ( - http_version, - status, - reason_phrase, - headers, - trailing_data, - ) = await self._receive_response_headers(**kwargs) - trace.return_value = ( - http_version, - status, - reason_phrase, - headers, - ) - - network_stream = self._network_stream - - # CONNECT or Upgrade request - if (status == 101) or ( - (request.method == b"CONNECT") and (200 <= status < 300) - ): - network_stream = AsyncHTTP11UpgradeStream(network_stream, trailing_data) - - return Response( - status=status, - headers=headers, - content=HTTP11ConnectionByteStream(self, request), - extensions={ - "http_version": http_version, - "reason_phrase": reason_phrase, - "network_stream": network_stream, - }, - ) - except BaseException as exc: - with AsyncShieldCancellation(): - async with Trace("response_closed", logger, request) as trace: - await self._response_closed() - raise exc - - # Sending the request...
- - async def _send_request_headers(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): - event = h11.Request( - method=request.method, - target=request.url.target, - headers=request.headers, - ) - await self._send_event(event, timeout=timeout) - - async def _send_request_body(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - assert isinstance(request.stream, AsyncIterable) - async for chunk in request.stream: - event = h11.Data(data=chunk) - await self._send_event(event, timeout=timeout) - - await self._send_event(h11.EndOfMessage(), timeout=timeout) - - async def _send_event( - self, event: h11.Event, timeout: Optional[float] = None - ) -> None: - bytes_to_send = self._h11_state.send(event) - if bytes_to_send is not None: - await self._network_stream.write(bytes_to_send, timeout=timeout) - - # Receiving the response... - - async def _receive_response_headers( - self, request: Request - ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = await self._receive_event(timeout=timeout) - if isinstance(event, h11.Response): - break - if ( - isinstance(event, h11.InformationalResponse) - and event.status_code == 101 - ): - break - - http_version = b"HTTP/" + event.http_version - - # h11 version 0.11+ supports a `raw_items` interface to get the - # raw header casing, rather than the enforced lowercase headers. - headers = event.headers.raw_items() - - trailing_data, _ = self._h11_state.trailing_data - - return http_version, event.status_code, event.reason, headers, trailing_data - - async def _receive_response_body(self, request: Request) -> AsyncIterator[bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = await self._receive_event(timeout=timeout) - if isinstance(event, h11.Data): - yield bytes(event.data) - elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): - break - - async def _receive_event( - self, timeout: Optional[float] = None - ) -> Union[h11.Event, Type[h11.PAUSED]]: - while True: - with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): - event = self._h11_state.next_event() - - if event is h11.NEED_DATA: - data = await self._network_stream.read( - self.READ_NUM_BYTES, timeout=timeout - ) - - # If we feed this case through h11 we'll raise an exception like: - # - # httpcore.RemoteProtocolError: can't handle event type - # ConnectionClosed when role=SERVER and state=SEND_RESPONSE - # - # Which is accurate, but not very informative from an end-user - # perspective. Instead we handle this case distinctly and treat - # it as a ConnectError. - if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: - msg = "Server disconnected without sending a response." 
- raise RemoteProtocolError(msg) - - self._h11_state.receive_data(data) - else: - # mypy fails to narrow the type in the if statement above - return event # type: ignore[return-value] - - async def _response_closed(self) -> None: - async with self._state_lock: - if ( - self._h11_state.our_state is h11.DONE - and self._h11_state.their_state is h11.DONE - ): - self._state = HTTPConnectionState.IDLE - self._h11_state.start_next_cycle() - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - else: - await self.aclose() - - # Once the connection is no longer required... - - async def aclose(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._state = HTTPConnectionState.CLOSED - await self._network_stream.aclose() - - # The AsyncConnectionInterface methods provide information about the state of - # the connection, allowing for a connection pooling implementation to - # determine when to reuse and when to close the connection... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - # Note that HTTP/1.1 connections in the "NEW" state are not treated as - # being "available". The control flow which created the connection will - # be able to send an outgoing request, but the connection will not be - # acquired from the connection pool for any other request. - return self._state == HTTPConnectionState.IDLE - - def has_expired(self) -> bool: - now = time.monotonic() - keepalive_expired = self._expire_at is not None and now > self._expire_at - - # If the HTTP connection is idle but the socket is readable, then the - # only valid state is that the socket is about to return b"", indicating - # a server-initiated disconnect. - server_disconnected = ( - self._state == HTTPConnectionState.IDLE - and self._network_stream.get_extra_info("is_readable") - ) - - return keepalive_expired or server_disconnected - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/1.1, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly.
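For reference, the h11 event flow that this class drives internally can be exercised on its own. A minimal client-side sketch, independent of httpcore, with placeholder host and target values:

```python
import h11

# Client-side HTTP/1.1 state machine, as used by AsyncHTTP11Connection.
conn = h11.Connection(our_role=h11.CLIENT)

# Serialize the request head and end-of-message into wire bytes.
request = h11.Request(method="GET", target="/", headers=[("Host", "example.com")])
to_send = conn.send(request) + conn.send(h11.EndOfMessage())

# 'to_send' would be written to the network stream. Bytes read back are
# fed into conn.receive_data(), and conn.next_event() then yields
# h11.Response, h11.Data, and finally h11.EndOfMessage events.
```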
- - async def __aenter__(self) -> "AsyncHTTP11Connection": - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - await self.aclose() - - -class HTTP11ConnectionByteStream: - def __init__(self, connection: AsyncHTTP11Connection, request: Request) -> None: - self._connection = connection - self._request = request - self._closed = False - - async def __aiter__(self) -> AsyncIterator[bytes]: - kwargs = {"request": self._request} - try: - async with Trace("receive_response_body", logger, self._request, kwargs): - async for chunk in self._connection._receive_response_body(**kwargs): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with AsyncShieldCancellation(): - await self.aclose() - raise exc - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - async with Trace("response_closed", logger, self._request): - await self._connection._response_closed() - - -class AsyncHTTP11UpgradeStream(AsyncNetworkStream): - def __init__(self, stream: AsyncNetworkStream, leading_data: bytes) -> None: - self._stream = stream - self._leading_data = leading_data - - async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: - if self._leading_data: - buffer = self._leading_data[:max_bytes] - self._leading_data = self._leading_data[max_bytes:] - return buffer - else: - return await self._stream.read(max_bytes, timeout) - - async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: - await self._stream.write(buffer, timeout) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, - ) -> AsyncNetworkStream: - return await self._stream.start_tls(ssl_context, server_hostname, timeout) - - def get_extra_info(self, info: str) -> Any: - return self._stream.get_extra_info(info) diff --git a/myenv/Lib/site-packages/httpcore/_async/http2.py b/myenv/Lib/site-packages/httpcore/_async/http2.py deleted file mode 100644 index c201ee4..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/http2.py +++ /dev/null @@ -1,589 +0,0 @@ -import enum -import logging -import time -import types -import typing - -import h2.config -import h2.connection -import h2.events -import h2.exceptions -import h2.settings - -from .._backends.base import AsyncNetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, -) -from .._models import Origin, Request, Response -from .._synchronization import AsyncLock, AsyncSemaphore, AsyncShieldCancellation -from .._trace import Trace -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.http2") - - -def has_body_headers(request: Request) -> bool: - return any( - k.lower() == b"content-length" or k.lower() == b"transfer-encoding" - for k, v in request.headers - ) - - -class HTTPConnectionState(enum.IntEnum): - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class AsyncHTTP2Connection(AsyncConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) - - def __init__( - self, - origin: Origin, - stream: AsyncNetworkStream, - keepalive_expiry: typing.Optional[float] 
= None, - ): - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: typing.Optional[float] = keepalive_expiry - self._h2_state = h2.connection.H2Connection(config=self.CONFIG) - self._state = HTTPConnectionState.IDLE - self._expire_at: typing.Optional[float] = None - self._request_count = 0 - self._init_lock = AsyncLock() - self._state_lock = AsyncLock() - self._read_lock = AsyncLock() - self._write_lock = AsyncLock() - self._sent_connection_init = False - self._used_all_stream_ids = False - self._connection_error = False - - # Mapping from stream ID to response stream events. - self._events: typing.Dict[ - int, - typing.Union[ - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ], - ] = {} - - # Connection terminated events are stored as state since - # we need to handle them for all streams. - self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = ( - None - ) - - self._read_exception: typing.Optional[Exception] = None - self._write_exception: typing.Optional[Exception] = None - - async def handle_async_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - # This cannot occur in normal operation, since the connection pool - # will only send requests on connections that handle them. - # It's in place simply for resilience as a guard against incorrect - # usage, for anyone working directly with httpcore connections. - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - async with self._state_lock: - if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): - self._request_count += 1 - self._expire_at = None - self._state = HTTPConnectionState.ACTIVE - else: - raise ConnectionNotAvailable() - - async with self._init_lock: - if not self._sent_connection_init: - try: - kwargs = {"request": request} - async with Trace("send_connection_init", logger, request, kwargs): - await self._send_connection_init(**kwargs) - except BaseException as exc: - with AsyncShieldCancellation(): - await self.aclose() - raise exc - - self._sent_connection_init = True - - # Initially start with just 1 until the remote server provides - # its max_concurrent_streams value - self._max_streams = 1 - - local_settings_max_streams = ( - self._h2_state.local_settings.max_concurrent_streams - ) - self._max_streams_semaphore = AsyncSemaphore(local_settings_max_streams) - - for _ in range(local_settings_max_streams - self._max_streams): - await self._max_streams_semaphore.acquire() - - await self._max_streams_semaphore.acquire() - - try: - stream_id = self._h2_state.get_next_available_stream_id() - self._events[stream_id] = [] - except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover - self._used_all_stream_ids = True - self._request_count -= 1 - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request, "stream_id": stream_id} - async with Trace("send_request_headers", logger, request, kwargs): - await self._send_request_headers(request=request, stream_id=stream_id) - async with Trace("send_request_body", logger, request, kwargs): - await self._send_request_body(request=request, stream_id=stream_id) - async with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - status, headers = await self._receive_response( - request=request, stream_id=stream_id - ) - trace.return_value = (status, headers) - - return Response( - status=status, - 
headers=headers, - content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), - extensions={ - "http_version": b"HTTP/2", - "network_stream": self._network_stream, - "stream_id": stream_id, - }, - ) - except BaseException as exc: # noqa: PIE786 - with AsyncShieldCancellation(): - kwargs = {"stream_id": stream_id} - async with Trace("response_closed", logger, request, kwargs): - await self._response_closed(stream_id=stream_id) - - if isinstance(exc, h2.exceptions.ProtocolError): - # One case where h2 can raise a protocol error is when a - # closed frame has been seen by the state machine. - # - # This happens when one stream is reading, and encounters - # a GOAWAY event. Other flows of control may then raise - # a protocol error at any point they interact with the 'h2_state'. - # - # In this case we'll have stored the event, and should raise - # it as a RemoteProtocolError. - if self._connection_terminated: # pragma: nocover - raise RemoteProtocolError(self._connection_terminated) - # If h2 raises a protocol error in some other state then we - # must somehow have made a protocol violation. - raise LocalProtocolError(exc) # pragma: nocover - - raise exc - - async def _send_connection_init(self, request: Request) -> None: - """ - The HTTP/2 connection requires some initial setup before we can start - using individual request/response streams on it. - """ - # Need to set these manually here instead of manipulating via - # __setitem__() otherwise the H2Connection will emit SettingsUpdate - # frames in addition to sending the undesired defaults. - self._h2_state.local_settings = h2.settings.Settings( - client=True, - initial_values={ - # Disable PUSH_PROMISE frames from the server since we don't do anything - # with them for now. Maybe when we support caching? - h2.settings.SettingCodes.ENABLE_PUSH: 0, - # These two are taken from h2 for safe defaults - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, - h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, - }, - ) - - # Some websites (*cough* Yahoo *cough*) balk at this setting being - # present in the initial handshake since it's not defined in the original - # RFC despite the RFC mandating ignoring settings you don't know about. - del self._h2_state.local_settings[ - h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL - ] - - self._h2_state.initiate_connection() - self._h2_state.increment_flow_control_window(2**24) - await self._write_outgoing_data(request) - - # Sending the request... - - async def _send_request_headers(self, request: Request, stream_id: int) -> None: - """ - Send the request headers to a given stream ID. - """ - end_stream = not has_body_headers(request) - - # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. - # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require - # HTTP/1.1 style headers, and map them appropriately if we end up on - # an HTTP/2 connection. 
- authority = [v for k, v in request.headers if k.lower() == b"host"][0] - - headers = [ - (b":method", request.method), - (b":authority", authority), - (b":scheme", request.url.scheme), - (b":path", request.url.target), - ] + [ - (k.lower(), v) - for k, v in request.headers - if k.lower() - not in ( - b"host", - b"transfer-encoding", - ) - ] - - self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) - self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) - await self._write_outgoing_data(request) - - async def _send_request_body(self, request: Request, stream_id: int) -> None: - """ - Iterate over the request body sending it to a given stream ID. - """ - if not has_body_headers(request): - return - - assert isinstance(request.stream, typing.AsyncIterable) - async for data in request.stream: - await self._send_stream_data(request, stream_id, data) - await self._send_end_stream(request, stream_id) - - async def _send_stream_data( - self, request: Request, stream_id: int, data: bytes - ) -> None: - """ - Send a single chunk of data in one or more data frames. - """ - while data: - max_flow = await self._wait_for_outgoing_flow(request, stream_id) - chunk_size = min(len(data), max_flow) - chunk, data = data[:chunk_size], data[chunk_size:] - self._h2_state.send_data(stream_id, chunk) - await self._write_outgoing_data(request) - - async def _send_end_stream(self, request: Request, stream_id: int) -> None: - """ - Send an empty data frame on a given stream ID with the END_STREAM flag set. - """ - self._h2_state.end_stream(stream_id) - await self._write_outgoing_data(request) - - # Receiving the response... - - async def _receive_response( - self, request: Request, stream_id: int - ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: - """ - Return the response status code and headers for a given stream ID. - """ - while True: - event = await self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.ResponseReceived): - break - - status_code = 200 - headers = [] - for k, v in event.headers: - if k == b":status": - status_code = int(v.decode("ascii", errors="ignore")) - elif not k.startswith(b":"): - headers.append((k, v)) - - return (status_code, headers) - - async def _receive_response_body( - self, request: Request, stream_id: int - ) -> typing.AsyncIterator[bytes]: - """ - Iterator that returns the bytes of the response body for a given stream ID. - """ - while True: - event = await self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.DataReceived): - amount = event.flow_controlled_length - self._h2_state.acknowledge_received_data(amount, stream_id) - await self._write_outgoing_data(request) - yield event.data - elif isinstance(event, h2.events.StreamEnded): - break - - async def _receive_stream_event( - self, request: Request, stream_id: int - ) -> typing.Union[ - h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded - ]: - """ - Return the next available event for a given stream ID. - - Will read more data from the network if required. - """ - while not self._events.get(stream_id): - await self._receive_events(request, stream_id) - event = self._events[stream_id].pop(0) - if isinstance(event, h2.events.StreamReset): - raise RemoteProtocolError(event) - return event - - async def _receive_events( - self, request: Request, stream_id: typing.Optional[int] = None - ) -> None: - """ - Read some data from the network until we see one or more events - for a given stream ID.
- """ - async with self._read_lock: - if self._connection_terminated is not None: - last_stream_id = self._connection_terminated.last_stream_id - if stream_id and last_stream_id and stream_id > last_stream_id: - self._request_count -= 1 - raise ConnectionNotAvailable() - raise RemoteProtocolError(self._connection_terminated) - - # This conditional is a bit icky. We don't want to block reading if we've - # actually got an event to return for a given stream. We need to do that - # check *within* the atomic read lock. Though it also need to be optional, - # because when we call it from `_wait_for_outgoing_flow` we *do* want to - # block until we've available flow control, event when we have events - # pending for the stream ID we're attempting to send on. - if stream_id is None or not self._events.get(stream_id): - events = await self._read_incoming_data(request) - for event in events: - if isinstance(event, h2.events.RemoteSettingsChanged): - async with Trace( - "receive_remote_settings", logger, request - ) as trace: - await self._receive_remote_settings_change(event) - trace.return_value = event - - elif isinstance( - event, - ( - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ), - ): - if event.stream_id in self._events: - self._events[event.stream_id].append(event) - - elif isinstance(event, h2.events.ConnectionTerminated): - self._connection_terminated = event - - await self._write_outgoing_data(request) - - async def _receive_remote_settings_change(self, event: h2.events.Event) -> None: - max_concurrent_streams = event.changed_settings.get( - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS - ) - if max_concurrent_streams: - new_max_streams = min( - max_concurrent_streams.new_value, - self._h2_state.local_settings.max_concurrent_streams, - ) - if new_max_streams and new_max_streams != self._max_streams: - while new_max_streams > self._max_streams: - await self._max_streams_semaphore.release() - self._max_streams += 1 - while new_max_streams < self._max_streams: - await self._max_streams_semaphore.acquire() - self._max_streams -= 1 - - async def _response_closed(self, stream_id: int) -> None: - await self._max_streams_semaphore.release() - del self._events[stream_id] - async with self._state_lock: - if self._connection_terminated and not self._events: - await self.aclose() - - elif self._state == HTTPConnectionState.ACTIVE and not self._events: - self._state = HTTPConnectionState.IDLE - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - if self._used_all_stream_ids: # pragma: nocover - await self.aclose() - - async def aclose(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._h2_state.close_connection() - self._state = HTTPConnectionState.CLOSED - await self._network_stream.aclose() - - # Wrappers around network read/write operations... - - async def _read_incoming_data( - self, request: Request - ) -> typing.List[h2.events.Event]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - if self._read_exception is not None: - raise self._read_exception # pragma: nocover - - try: - data = await self._network_stream.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - except Exception as exc: - # If we get a network error we should: - # - # 1. 
Save the exception and just raise it immediately on any future reads. - # (For example, this means that a single read timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. - self._read_exception = exc - self._connection_error = True - raise exc - - events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) - - return events - - async def _write_outgoing_data(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - async with self._write_lock: - data_to_send = self._h2_state.data_to_send() - - if self._write_exception is not None: - raise self._write_exception # pragma: nocover - - try: - await self._network_stream.write(data_to_send, timeout) - except Exception as exc: # pragma: nocover - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future write. - # (For example, this means that a single write timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. - self._write_exception = exc - self._connection_error = True - raise exc - - # Flow control... - - async def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow: int = self._h2_state.local_flow_control_window(stream_id) - max_frame_size: int = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - while flow == 0: - await self._receive_events(request) - local_flow = self._h2_state.local_flow_control_window(stream_id) - max_frame_size = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - return flow - - # Interface for connection pooling... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - return ( - self._state != HTTPConnectionState.CLOSED - and not self._connection_error - and not self._used_all_stream_ids - and not ( - self._h2_state.state_machine.state - == h2.connection.ConnectionState.CLOSED - ) - ) - - def has_expired(self) -> bool: - now = time.monotonic() - return self._expire_at is not None and now > self._expire_at - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/2, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
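Likewise, the h2 state machine backing this class can be sketched in isolation. This only shows the frame-serialization side, not the network I/O, and the header values are placeholders:

```python
import h2.config
import h2.connection

# Client-side HTTP/2 state machine, as used by AsyncHTTP2Connection.
config = h2.config.H2Configuration(client_side=True)
conn = h2.connection.H2Connection(config=config)
conn.initiate_connection()
preface = conn.data_to_send()  # connection preface plus SETTINGS frame

# Open a stream by sending the request as HTTP/2 pseudo-headers.
stream_id = conn.get_next_available_stream_id()
conn.send_headers(
    stream_id,
    [
        (":method", "GET"),
        (":authority", "example.com"),
        (":scheme", "https"),
        (":path", "/"),
    ],
    end_stream=True,
)
headers_frame = conn.data_to_send()  # HEADERS frame bytes for the wire
```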
- - async def __aenter__(self) -> "AsyncHTTP2Connection": - return self - - async def __aexit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[types.TracebackType] = None, - ) -> None: - await self.aclose() - - -class HTTP2ConnectionByteStream: - def __init__( - self, connection: AsyncHTTP2Connection, request: Request, stream_id: int - ) -> None: - self._connection = connection - self._request = request - self._stream_id = stream_id - self._closed = False - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - kwargs = {"request": self._request, "stream_id": self._stream_id} - try: - async with Trace("receive_response_body", logger, self._request, kwargs): - async for chunk in self._connection._receive_response_body( - request=self._request, stream_id=self._stream_id - ): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with AsyncShieldCancellation(): - await self.aclose() - raise exc - - async def aclose(self) -> None: - if not self._closed: - self._closed = True - kwargs = {"stream_id": self._stream_id} - async with Trace("response_closed", logger, self._request, kwargs): - await self._connection._response_closed(stream_id=self._stream_id) diff --git a/myenv/Lib/site-packages/httpcore/_async/http_proxy.py b/myenv/Lib/site-packages/httpcore/_async/http_proxy.py deleted file mode 100644 index 4aa7d87..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/http_proxy.py +++ /dev/null @@ -1,368 +0,0 @@ -import logging -import ssl -from base64 import b64encode -from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union - -from .._backends.base import SOCKET_OPTION, AsyncNetworkBackend -from .._exceptions import ProxyError -from .._models import ( - URL, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, -) -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .connection import AsyncHTTPConnection -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] -HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] - - -logger = logging.getLogger("httpcore.proxy") - - -def merge_headers( - default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, -) -> List[Tuple[bytes, bytes]]: - """ - Append default_headers and override_headers, de-duplicating if a key exists - in both cases. - """ - default_headers = [] if default_headers is None else list(default_headers) - override_headers = [] if override_headers is None else list(override_headers) - has_override = set(key.lower() for key, value in override_headers) - default_headers = [ - (key, value) - for key, value in default_headers - if key.lower() not in has_override - ] - return default_headers + override_headers - - -def build_auth_header(username: bytes, password: bytes) -> bytes: - userpass = username + b":" + password - return b"Basic " + b64encode(userpass) - - -class AsyncHTTPProxy(AsyncConnectionPool): - """ - A connection pool that sends requests via an HTTP proxy. 
- """ - - def __init__( - self, - proxy_url: Union[URL, bytes, str], - proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - proxy_auth: Any proxy authentication as a two-tuple of - (username, password). May be either bytes or ascii-only str. - proxy_headers: Any HTTP headers to use for the proxy requests. - For example `{"Proxy-Authorization": "Basic :"}`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - local_address=local_address, - uds=uds, - socket_options=socket_options, - ) - - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if ( - self._proxy_url.scheme == b"http" and proxy_ssl_context is not None - ): # pragma: no cover - raise RuntimeError( - "The `proxy_ssl_context` argument is not allowed for the http scheme" - ) - - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - if proxy_auth is not None: - username = enforce_bytes(proxy_auth[0], name="proxy_auth") - password = enforce_bytes(proxy_auth[1], name="proxy_auth") - authorization = build_auth_header(username, password) - self._proxy_headers = [ - (b"Proxy-Authorization", authorization) - ] + self._proxy_headers - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - if origin.scheme == b"http": - return AsyncForwardHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - proxy_ssl_context=self._proxy_ssl_context, - ) - return AsyncTunnelHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - ssl_context=self._ssl_context, - proxy_ssl_context=self._proxy_ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class AsyncForwardHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - keepalive_expiry: Optional[float] = None, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - ) -> None: - self._connection = AsyncHTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._remote_origin = remote_origin - - async def handle_async_request(self, request: Request) -> Response: - headers = merge_headers(self._proxy_headers, request.headers) - url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=bytes(request.url), - ) - proxy_request = Request( - method=request.method, - url=url, - headers=headers, - content=request.stream, - extensions=request.extensions, - ) - return await self._connection.handle_async_request(proxy_request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - await self._connection.aclose() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> 
bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - -class AsyncTunnelHTTPConnection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: Optional[AsyncNetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._connection: AsyncConnectionInterface = AsyncHTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._connect_lock = AsyncLock() - self._connected = False - - async def handle_async_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("connect", None) - - async with self._connect_lock: - if not self._connected: - target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) - - connect_url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=target, - ) - connect_headers = merge_headers( - [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers - ) - connect_request = Request( - method=b"CONNECT", - url=connect_url, - headers=connect_headers, - extensions=request.extensions, - ) - connect_response = await self._connection.handle_async_request( - connect_request - ) - - if connect_response.status < 200 or connect_response.status > 299: - reason_bytes = connect_response.extensions.get("reason_phrase", b"") - reason_str = reason_bytes.decode("ascii", errors="ignore") - msg = "%d %s" % (connect_response.status, reason_str) - await self._connection.aclose() - raise ProxyError(msg) - - stream = connect_response.extensions["network_stream"] - - # Upgrade the stream to SSL - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._remote_origin, - stream=stream, - 
keepalive_expiry=self._keepalive_expiry, - ) - - self._connected = True - return await self._connection.handle_async_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - await self._connection.aclose() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/Lib/site-packages/httpcore/_async/interfaces.py b/myenv/Lib/site-packages/httpcore/_async/interfaces.py deleted file mode 100644 index c998dd2..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/interfaces.py +++ /dev/null @@ -1,135 +0,0 @@ -from contextlib import asynccontextmanager -from typing import AsyncIterator, Optional, Union - -from .._models import ( - URL, - Extensions, - HeaderTypes, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, - include_request_headers, -) - - -class AsyncRequestInterface: - async def request( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, AsyncIterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> Response: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = await self.handle_async_request(request) - try: - await response.aread() - finally: - await response.aclose() - return response - - @asynccontextmanager - async def stream( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, AsyncIterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> AsyncIterator[Response]: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. 
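The tunnelling flow that `AsyncTunnelHTTPConnection` implements above (send `CONNECT`, require a 2xx reply, then negotiate TLS over the tunnelled stream) can be reproduced with the standard library alone. A minimal blocking sketch, with placeholder proxy and origin addresses and error handling reduced to the essentials:

```python
import socket
import ssl

PROXY_HOST, PROXY_PORT = "localhost", 8080          # placeholder proxy
TARGET_HOST, TARGET_PORT = "www.example.com", 443   # placeholder origin

# 1. Plain TCP connection to the proxy.
sock = socket.create_connection((PROXY_HOST, PROXY_PORT), timeout=10)

# 2. Ask the proxy to open a tunnel, mirroring the CONNECT request above.
sock.sendall(
    f"CONNECT {TARGET_HOST}:{TARGET_PORT} HTTP/1.1\r\n"
    f"Host: {TARGET_HOST}:{TARGET_PORT}\r\n"
    f"Accept: */*\r\n\r\n".encode("ascii")
)

# 3. Any 2xx status line means the tunnel is established.
reply = sock.recv(4096)  # assumes the status line arrives in one read
status = int(reply.split(b" ", 2)[1].decode("ascii"))
if not 200 <= status <= 299:
    raise RuntimeError(f"Proxy refused CONNECT: {status}")

# 4. Upgrade the tunnelled stream to TLS, as start_tls() does above.
tls = ssl.create_default_context().wrap_socket(sock, server_hostname=TARGET_HOST)
tls.sendall(b"GET / HTTP/1.1\r\nHost: www.example.com\r\n\r\n")
print(tls.recv(4096))
tls.close()
```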
- headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = await self.handle_async_request(request) - try: - yield response - finally: - await response.aclose() - - async def handle_async_request(self, request: Request) -> Response: - raise NotImplementedError() # pragma: nocover - - -class AsyncConnectionInterface(AsyncRequestInterface): - async def aclose(self) -> None: - raise NotImplementedError() # pragma: nocover - - def info(self) -> str: - raise NotImplementedError() # pragma: nocover - - def can_handle_request(self, origin: Origin) -> bool: - raise NotImplementedError() # pragma: nocover - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an - outgoing request. - - An HTTP/1.1 connection will only be available if it is currently idle. - - An HTTP/2 connection will be available so long as the stream ID space is - not yet exhausted, and the connection is not in an error state. - - While the connection is being established we may not yet know if it is going - to result in an HTTP/1.1 or HTTP/2 connection. The connection should be - treated as being available, but might ultimately raise `NewConnectionRequired` - required exceptions if multiple requests are attempted over a connection - that ends up being established as HTTP/1.1. - """ - raise NotImplementedError() # pragma: nocover - - def has_expired(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - - This either means that the connection is idle and it has passed the - expiry time on its keep-alive, or that server has sent an EOF. - """ - raise NotImplementedError() # pragma: nocover - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - raise NotImplementedError() # pragma: nocover - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - - Used when a response is closed to determine if the connection may be - returned to the connection pool or not. 
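`AsyncRequestInterface` above is what gives every pool and connection its two entry points: `request()`, which buffers and closes the body before returning, and `stream()`, which holds the connection open while the caller iterates. A usage sketch against the public API (network access assumed):

```python
import asyncio
import httpcore

async def main() -> None:
    async with httpcore.AsyncConnectionPool() as pool:
        # request() reads and closes the response before returning.
        response = await pool.request("GET", "https://www.example.com/")
        print(response.status, len(response.content))

        # stream() yields the response while the body is still open.
        async with pool.stream("GET", "https://www.example.com/") as response:
            async for chunk in response.aiter_stream():
                print(f"received {len(chunk)} bytes")

asyncio.run(main())
```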
- """ - raise NotImplementedError() # pragma: nocover diff --git a/myenv/Lib/site-packages/httpcore/_async/socks_proxy.py b/myenv/Lib/site-packages/httpcore/_async/socks_proxy.py deleted file mode 100644 index f839603..0000000 --- a/myenv/Lib/site-packages/httpcore/_async/socks_proxy.py +++ /dev/null @@ -1,342 +0,0 @@ -import logging -import ssl -import typing - -from socksio import socks5 - -from .._backends.auto import AutoBackend -from .._backends.base import AsyncNetworkBackend, AsyncNetworkStream -from .._exceptions import ConnectionNotAvailable, ProxyError -from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url -from .._ssl import default_ssl_context -from .._synchronization import AsyncLock -from .._trace import Trace -from .connection_pool import AsyncConnectionPool -from .http11 import AsyncHTTP11Connection -from .interfaces import AsyncConnectionInterface - -logger = logging.getLogger("httpcore.socks") - - -AUTH_METHODS = { - b"\x00": "NO AUTHENTICATION REQUIRED", - b"\x01": "GSSAPI", - b"\x02": "USERNAME/PASSWORD", - b"\xff": "NO ACCEPTABLE METHODS", -} - -REPLY_CODES = { - b"\x00": "Succeeded", - b"\x01": "General SOCKS server failure", - b"\x02": "Connection not allowed by ruleset", - b"\x03": "Network unreachable", - b"\x04": "Host unreachable", - b"\x05": "Connection refused", - b"\x06": "TTL expired", - b"\x07": "Command not supported", - b"\x08": "Address type not supported", -} - - -async def _init_socks5_connection( - stream: AsyncNetworkStream, - *, - host: bytes, - port: int, - auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, -) -> None: - conn = socks5.SOCKS5Connection() - - # Auth method request - auth_method = ( - socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED - if auth is None - else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD - ) - conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) - outgoing_bytes = conn.data_to_send() - await stream.write(outgoing_bytes) - - # Auth method response - incoming_bytes = await stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5AuthReply) - if response.method != auth_method: - requested = AUTH_METHODS.get(auth_method, "UNKNOWN") - responded = AUTH_METHODS.get(response.method, "UNKNOWN") - raise ProxyError( - f"Requested {requested} from proxy server, but got {responded}." 
- ) - - if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: - # Username/password request - assert auth is not None - username, password = auth - conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) - outgoing_bytes = conn.data_to_send() - await stream.write(outgoing_bytes) - - # Username/password response - incoming_bytes = await stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) - if not response.success: - raise ProxyError("Invalid username/password") - - # Connect request - conn.send( - socks5.SOCKS5CommandRequest.from_address( - socks5.SOCKS5Command.CONNECT, (host, port) - ) - ) - outgoing_bytes = conn.data_to_send() - await stream.write(outgoing_bytes) - - # Connect response - incoming_bytes = await stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5Reply) - if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: - reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") - raise ProxyError(f"Proxy Server could not connect: {reply_code}.") - - -class AsyncSOCKSProxy(AsyncConnectionPool): - """ - A connection pool that sends requests via an HTTP proxy. - """ - - def __init__( - self, - proxy_url: typing.Union[URL, bytes, str], - proxy_auth: typing.Optional[ - typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] - ] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - max_connections: typing.Optional[int] = 10, - max_keepalive_connections: typing.Optional[int] = None, - keepalive_expiry: typing.Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - network_backend: typing.Optional[AsyncNetworkBackend] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
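Since `AsyncSOCKSProxy` subclasses `AsyncConnectionPool`, it is used exactly like a plain pool; only the construction differs. A short sketch (placeholder proxy address and credentials, requires the `socksio` extra):

```python
import asyncio
import httpcore

async def main() -> None:
    async with httpcore.AsyncSOCKSProxy(
        proxy_url="socks5://127.0.0.1:1080/",   # placeholder
        proxy_auth=(b"username", b"password"),  # optional, placeholder
    ) as proxy:
        response = await proxy.request("GET", "https://www.example.com/")
        print(response.status)

asyncio.run(main())
```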
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - ) - self._ssl_context = ssl_context - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if proxy_auth is not None: - username, password = proxy_auth - username_bytes = enforce_bytes(username, name="proxy_auth") - password_bytes = enforce_bytes(password, name="proxy_auth") - self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( - username_bytes, - password_bytes, - ) - else: - self._proxy_auth = None - - def create_connection(self, origin: Origin) -> AsyncConnectionInterface: - return AsyncSocks5Connection( - proxy_origin=self._proxy_url.origin, - remote_origin=origin, - proxy_auth=self._proxy_auth, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class AsyncSocks5Connection(AsyncConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - keepalive_expiry: typing.Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: typing.Optional[AsyncNetworkBackend] = None, - ) -> None: - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._proxy_auth = proxy_auth - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - - self._network_backend: AsyncNetworkBackend = ( - AutoBackend() if network_backend is None else network_backend - ) - self._connect_lock = AsyncLock() - self._connection: typing.Optional[AsyncConnectionInterface] = None - self._connect_failed = False - - async def handle_async_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - async with self._connect_lock: - if self._connection is None: - try: - # Connect to the proxy - kwargs = { - "host": self._proxy_origin.host.decode("ascii"), - "port": self._proxy_origin.port, - "timeout": timeout, - } - async with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = await self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - - # Connect to the remote host using socks5 - kwargs = { - "stream": stream, - "host": self._remote_origin.host.decode("ascii"), - "port": self._remote_origin.port, - "auth": self._proxy_auth, - } - async with Trace( - "setup_socks5_connection", logger, request, kwargs - ) as trace: - await _init_socks5_connection(**kwargs) - trace.return_value = stream - - # Upgrade the stream to SSL - if self._remote_origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ( - ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ) - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - async with Trace("start_tls", logger, request, kwargs) as trace: - stream = await stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if 
we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or ( - self._http2 and not self._http1 - ): # pragma: nocover - from .http2 import AsyncHTTP2Connection - - self._connection = AsyncHTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = AsyncHTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except Exception as exc: - self._connect_failed = True - raise exc - elif not self._connection.is_available(): # pragma: nocover - raise ConnectionNotAvailable() - - return await self._connection.handle_async_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - async def aclose(self) -> None: - if self._connection is not None: - await self._connection.aclose() - - def is_available(self) -> bool: - if self._connection is None: # pragma: nocover - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._remote_origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: # pragma: nocover - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/Lib/site-packages/httpcore/_backends/__init__.py b/myenv/Lib/site-packages/httpcore/_backends/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/httpcore/_backends/anyio.py b/myenv/Lib/site-packages/httpcore/_backends/anyio.py deleted file mode 100644 index d469e00..0000000 --- a/myenv/Lib/site-packages/httpcore/_backends/anyio.py +++ /dev/null @@ -1,148 +0,0 @@ -import ssl -import typing - -import anyio - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .._utils import is_socket_readable -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class AnyIOStream(AsyncNetworkStream): - def __init__(self, stream: anyio.abc.ByteStream) -> None: - self._stream = stream - - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: - exc_map = { - TimeoutError: ReadTimeout, - anyio.BrokenResourceError: ReadError, - anyio.ClosedResourceError: ReadError, - anyio.EndOfStream: ReadError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - try: - return await self._stream.receive(max_bytes=max_bytes) - except anyio.EndOfStream: # pragma: nocover - return b"" - - async def 
write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: - if not buffer: - return - - exc_map = { - TimeoutError: WriteTimeout, - anyio.BrokenResourceError: WriteError, - anyio.ClosedResourceError: WriteError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - await self._stream.send(item=buffer) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> AsyncNetworkStream: - exc_map = { - TimeoutError: ConnectTimeout, - anyio.BrokenResourceError: ConnectError, - anyio.EndOfStream: ConnectError, - ssl.SSLError: ConnectError, - } - with map_exceptions(exc_map): - try: - with anyio.fail_after(timeout): - ssl_stream = await anyio.streams.tls.TLSStream.wrap( - self._stream, - ssl_context=ssl_context, - hostname=server_hostname, - standard_compatible=False, - server_side=False, - ) - except Exception as exc: # pragma: nocover - await self.aclose() - raise exc - return AnyIOStream(ssl_stream) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object": - return self._stream.extra(anyio.streams.tls.TLSAttribute.ssl_object, None) - if info == "client_addr": - return self._stream.extra(anyio.abc.SocketAttribute.local_address, None) - if info == "server_addr": - return self._stream.extra(anyio.abc.SocketAttribute.remote_address, None) - if info == "socket": - return self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) - if info == "is_readable": - sock = self._stream.extra(anyio.abc.SocketAttribute.raw_socket, None) - return is_socket_readable(sock) - return None - - -class AnyIOBackend(AsyncNetworkBackend): - async def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - exc_map = { - TimeoutError: ConnectTimeout, - OSError: ConnectError, - anyio.BrokenResourceError: ConnectError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - stream: anyio.abc.ByteStream = await anyio.connect_tcp( - remote_host=host, - remote_port=port, - local_host=local_address, - ) - # By default TCP sockets opened in `asyncio` include TCP_NODELAY. 
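The `TCP_NODELAY` comment above is worth spelling out: every backend in this diff either disables Nagle's algorithm itself or relies on the event loop having done so, so that small request writes are flushed immediately. The plain-socket equivalent, and the tuple shape the `socket_options` parameters accept:

```python
import socket

sock = socket.create_connection(("www.example.com", 80), timeout=10)
# Disable Nagle's algorithm so small writes go out without coalescing delay.
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
assert sock.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY) == 1
sock.close()

# The same (level, optname, value) tuples are what socket_options=... takes;
# each one is applied to every freshly created socket.
options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
```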
- for option in socket_options: - stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return AnyIOStream(stream) - - async def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - exc_map = { - TimeoutError: ConnectTimeout, - OSError: ConnectError, - anyio.BrokenResourceError: ConnectError, - } - with map_exceptions(exc_map): - with anyio.fail_after(timeout): - stream: anyio.abc.ByteStream = await anyio.connect_unix(path) - for option in socket_options: - stream._raw_socket.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return AnyIOStream(stream) - - async def sleep(self, seconds: float) -> None: - await anyio.sleep(seconds) # pragma: nocover diff --git a/myenv/Lib/site-packages/httpcore/_backends/auto.py b/myenv/Lib/site-packages/httpcore/_backends/auto.py deleted file mode 100644 index 3ac05f4..0000000 --- a/myenv/Lib/site-packages/httpcore/_backends/auto.py +++ /dev/null @@ -1,51 +0,0 @@ -import typing -from typing import Optional - -from .._synchronization import current_async_library -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class AutoBackend(AsyncNetworkBackend): - async def _init_backend(self) -> None: - if not (hasattr(self, "_backend")): - backend = current_async_library() - if backend == "trio": - from .trio import TrioBackend - - self._backend: AsyncNetworkBackend = TrioBackend() - else: - from .anyio import AnyIOBackend - - self._backend = AnyIOBackend() - - async def connect_tcp( - self, - host: str, - port: int, - timeout: Optional[float] = None, - local_address: Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - await self._init_backend() - return await self._backend.connect_tcp( - host, - port, - timeout=timeout, - local_address=local_address, - socket_options=socket_options, - ) - - async def connect_unix_socket( - self, - path: str, - timeout: Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: # pragma: nocover - await self._init_backend() - return await self._backend.connect_unix_socket( - path, timeout=timeout, socket_options=socket_options - ) - - async def sleep(self, seconds: float) -> None: # pragma: nocover - await self._init_backend() - return await self._backend.sleep(seconds) diff --git a/myenv/Lib/site-packages/httpcore/_backends/base.py b/myenv/Lib/site-packages/httpcore/_backends/base.py deleted file mode 100644 index 6cadedb..0000000 --- a/myenv/Lib/site-packages/httpcore/_backends/base.py +++ /dev/null @@ -1,103 +0,0 @@ -import ssl -import time -import typing - -SOCKET_OPTION = typing.Union[ - typing.Tuple[int, int, int], - typing.Tuple[int, int, typing.Union[bytes, bytearray]], - typing.Tuple[int, int, None, int], -] - - -class NetworkStream: - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: - raise NotImplementedError() # pragma: nocover - - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: - raise NotImplementedError() # pragma: nocover - - def close(self) -> None: - raise NotImplementedError() # pragma: nocover - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = 
None, - ) -> "NetworkStream": - raise NotImplementedError() # pragma: nocover - - def get_extra_info(self, info: str) -> typing.Any: - return None # pragma: nocover - - -class NetworkBackend: - def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - raise NotImplementedError() # pragma: nocover - - def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - raise NotImplementedError() # pragma: nocover - - def sleep(self, seconds: float) -> None: - time.sleep(seconds) # pragma: nocover - - -class AsyncNetworkStream: - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: - raise NotImplementedError() # pragma: nocover - - async def write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: - raise NotImplementedError() # pragma: nocover - - async def aclose(self) -> None: - raise NotImplementedError() # pragma: nocover - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> "AsyncNetworkStream": - raise NotImplementedError() # pragma: nocover - - def get_extra_info(self, info: str) -> typing.Any: - return None # pragma: nocover - - -class AsyncNetworkBackend: - async def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - raise NotImplementedError() # pragma: nocover - - async def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - raise NotImplementedError() # pragma: nocover - - async def sleep(self, seconds: float) -> None: - raise NotImplementedError() # pragma: nocover diff --git a/myenv/Lib/site-packages/httpcore/_backends/mock.py b/myenv/Lib/site-packages/httpcore/_backends/mock.py deleted file mode 100644 index f7aefeb..0000000 --- a/myenv/Lib/site-packages/httpcore/_backends/mock.py +++ /dev/null @@ -1,142 +0,0 @@ -import ssl -import typing -from typing import Optional - -from .._exceptions import ReadError -from .base import ( - SOCKET_OPTION, - AsyncNetworkBackend, - AsyncNetworkStream, - NetworkBackend, - NetworkStream, -) - - -class MockSSLObject: - def __init__(self, http2: bool): - self._http2 = http2 - - def selected_alpn_protocol(self) -> str: - return "h2" if self._http2 else "http/1.1" - - -class MockStream(NetworkStream): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: - self._buffer = buffer - self._http2 = http2 - self._closed = False - - def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: - if self._closed: - raise ReadError("Connection closed") - if not self._buffer: - return b"" - return self._buffer.pop(0) - - def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: - pass - - def close(self) -> None: - self._closed = True - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, - ) -> NetworkStream: - return self - - def 
get_extra_info(self, info: str) -> typing.Any: - return MockSSLObject(http2=self._http2) if info == "ssl_object" else None - - def __repr__(self) -> str: - return "" - - -class MockBackend(NetworkBackend): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: - self._buffer = buffer - self._http2 = http2 - - def connect_tcp( - self, - host: str, - port: int, - timeout: Optional[float] = None, - local_address: Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - return MockStream(list(self._buffer), http2=self._http2) - - def connect_unix_socket( - self, - path: str, - timeout: Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - return MockStream(list(self._buffer), http2=self._http2) - - def sleep(self, seconds: float) -> None: - pass - - -class AsyncMockStream(AsyncNetworkStream): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: - self._buffer = buffer - self._http2 = http2 - self._closed = False - - async def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: - if self._closed: - raise ReadError("Connection closed") - if not self._buffer: - return b"" - return self._buffer.pop(0) - - async def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: - pass - - async def aclose(self) -> None: - self._closed = True - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, - ) -> AsyncNetworkStream: - return self - - def get_extra_info(self, info: str) -> typing.Any: - return MockSSLObject(http2=self._http2) if info == "ssl_object" else None - - def __repr__(self) -> str: - return "" - - -class AsyncMockBackend(AsyncNetworkBackend): - def __init__(self, buffer: typing.List[bytes], http2: bool = False) -> None: - self._buffer = buffer - self._http2 = http2 - - async def connect_tcp( - self, - host: str, - port: int, - timeout: Optional[float] = None, - local_address: Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - return AsyncMockStream(list(self._buffer), http2=self._http2) - - async def connect_unix_socket( - self, - path: str, - timeout: Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - return AsyncMockStream(list(self._buffer), http2=self._http2) - - async def sleep(self, seconds: float) -> None: - pass diff --git a/myenv/Lib/site-packages/httpcore/_backends/sync.py b/myenv/Lib/site-packages/httpcore/_backends/sync.py deleted file mode 100644 index 7b7b417..0000000 --- a/myenv/Lib/site-packages/httpcore/_backends/sync.py +++ /dev/null @@ -1,239 +0,0 @@ -import socket -import ssl -import sys -import typing -from functools import partial - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ExceptionMapping, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .._utils import is_socket_readable -from .base import SOCKET_OPTION, NetworkBackend, NetworkStream - - -class TLSinTLSStream(NetworkStream): # pragma: no cover - """ - Because the standard `SSLContext.wrap_socket` method does - not work for `SSLSocket` objects, we need this class - to implement TLS stream using an underlying `SSLObject` - instance in order to support TLS on top of TLS. 
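The docstring above is the crux of `TLSinTLSStream`: `wrap_socket()` refuses to wrap an `SSLSocket`, so TLS-in-TLS has to be driven manually through an `SSLObject` and a pair of memory BIOs. The pumping loop reduces to the following sketch, shown over a plain socket and a single TLS layer for brevity, with error paths omitted:

```python
import socket
import ssl

sock = socket.create_connection(("www.example.com", 443), timeout=10)
incoming, outgoing = ssl.MemoryBIO(), ssl.MemoryBIO()
ssl_obj = ssl.create_default_context().wrap_bio(
    incoming, outgoing, server_hostname="www.example.com"
)

# Pump the handshake by hand: flush whatever TLS wants to write, feed the
# peer's reply back in, and retry until SSLWantReadError stops being raised.
# This is the loop that _perform_io() generalises above.
while True:
    try:
        ssl_obj.do_handshake()
        break
    except ssl.SSLWantReadError:
        sock.sendall(outgoing.read())
        incoming.write(sock.recv(16384))  # one TLS record, per RFC 8449
sock.sendall(outgoing.read())  # flush any final handshake bytes
print("negotiated", ssl_obj.version())
sock.close()
```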
- """ - - # Defined in RFC 8449 - TLS_RECORD_SIZE = 16384 - - def __init__( - self, - sock: socket.socket, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ): - self._sock = sock - self._incoming = ssl.MemoryBIO() - self._outgoing = ssl.MemoryBIO() - - self.ssl_obj = ssl_context.wrap_bio( - incoming=self._incoming, - outgoing=self._outgoing, - server_hostname=server_hostname, - ) - - self._sock.settimeout(timeout) - self._perform_io(self.ssl_obj.do_handshake) - - def _perform_io( - self, - func: typing.Callable[..., typing.Any], - ) -> typing.Any: - ret = None - - while True: - errno = None - try: - ret = func() - except (ssl.SSLWantReadError, ssl.SSLWantWriteError) as e: - errno = e.errno - - self._sock.sendall(self._outgoing.read()) - - if errno == ssl.SSL_ERROR_WANT_READ: - buf = self._sock.recv(self.TLS_RECORD_SIZE) - - if buf: - self._incoming.write(buf) - else: - self._incoming.write_eof() - if errno is None: - return ret - - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: - exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - return typing.cast( - bytes, self._perform_io(partial(self.ssl_obj.read, max_bytes)) - ) - - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: - exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - while buffer: - nsent = self._perform_io(partial(self.ssl_obj.write, buffer)) - buffer = buffer[nsent:] - - def close(self) -> None: - self._sock.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> "NetworkStream": - raise NotImplementedError() - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object": - return self.ssl_obj - if info == "client_addr": - return self._sock.getsockname() - if info == "server_addr": - return self._sock.getpeername() - if info == "socket": - return self._sock - if info == "is_readable": - return is_socket_readable(self._sock) - return None - - -class SyncStream(NetworkStream): - def __init__(self, sock: socket.socket) -> None: - self._sock = sock - - def read(self, max_bytes: int, timeout: typing.Optional[float] = None) -> bytes: - exc_map: ExceptionMapping = {socket.timeout: ReadTimeout, OSError: ReadError} - with map_exceptions(exc_map): - self._sock.settimeout(timeout) - return self._sock.recv(max_bytes) - - def write(self, buffer: bytes, timeout: typing.Optional[float] = None) -> None: - if not buffer: - return - - exc_map: ExceptionMapping = {socket.timeout: WriteTimeout, OSError: WriteError} - with map_exceptions(exc_map): - while buffer: - self._sock.settimeout(timeout) - n = self._sock.send(buffer) - buffer = buffer[n:] - - def close(self) -> None: - self._sock.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> NetworkStream: - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - with map_exceptions(exc_map): - try: - if isinstance(self._sock, ssl.SSLSocket): # pragma: no cover - # If the underlying socket has already been upgraded - # to the TLS layer (i.e. 
is an instance of SSLSocket), - # we need some additional smarts to support TLS-in-TLS. - return TLSinTLSStream( - self._sock, ssl_context, server_hostname, timeout - ) - else: - self._sock.settimeout(timeout) - sock = ssl_context.wrap_socket( - self._sock, server_hostname=server_hostname - ) - except Exception as exc: # pragma: nocover - self.close() - raise exc - return SyncStream(sock) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object" and isinstance(self._sock, ssl.SSLSocket): - return self._sock._sslobj # type: ignore - if info == "client_addr": - return self._sock.getsockname() - if info == "server_addr": - return self._sock.getpeername() - if info == "socket": - return self._sock - if info == "is_readable": - return is_socket_readable(self._sock) - return None - - -class SyncBackend(NetworkBackend): - def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: - # Note that we automatically include `TCP_NODELAY` - # in addition to any other custom socket options. - if socket_options is None: - socket_options = [] # pragma: no cover - address = (host, port) - source_address = None if local_address is None else (local_address, 0) - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - - with map_exceptions(exc_map): - sock = socket.create_connection( - address, - timeout, - source_address=source_address, - ) - for option in socket_options: - sock.setsockopt(*option) # pragma: no cover - sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) - return SyncStream(sock) - - def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> NetworkStream: # pragma: nocover - if sys.platform == "win32": - raise RuntimeError( - "Attempted to connect to a UNIX socket on a Windows system." 
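The `connect_unix_socket()` path above (guarded against Windows) is what backs the pool-level `uds=...` argument. Talking HTTP over a Unix domain socket by hand looks like this; the socket path and endpoint are placeholders, the Docker daemon being the usual example:

```python
import socket

UDS_PATH = "/var/run/docker.sock"  # placeholder path

sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)  # POSIX only
sock.settimeout(10)
sock.connect(UDS_PATH)
# From here the stream behaves exactly like a TCP connection; httpcore
# selects this code path when a pool is given uds=... instead of host/port.
sock.sendall(b"GET /_ping HTTP/1.1\r\nHost: docker\r\nConnection: close\r\n\r\n")
print(sock.recv(4096))
sock.close()
```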
- ) - if socket_options is None: - socket_options = [] - - exc_map: ExceptionMapping = { - socket.timeout: ConnectTimeout, - OSError: ConnectError, - } - with map_exceptions(exc_map): - sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - for option in socket_options: - sock.setsockopt(*option) - sock.settimeout(timeout) - sock.connect(path) - return SyncStream(sock) diff --git a/myenv/Lib/site-packages/httpcore/_backends/trio.py b/myenv/Lib/site-packages/httpcore/_backends/trio.py deleted file mode 100644 index b1626d2..0000000 --- a/myenv/Lib/site-packages/httpcore/_backends/trio.py +++ /dev/null @@ -1,161 +0,0 @@ -import ssl -import typing - -import trio - -from .._exceptions import ( - ConnectError, - ConnectTimeout, - ExceptionMapping, - ReadError, - ReadTimeout, - WriteError, - WriteTimeout, - map_exceptions, -) -from .base import SOCKET_OPTION, AsyncNetworkBackend, AsyncNetworkStream - - -class TrioStream(AsyncNetworkStream): - def __init__(self, stream: trio.abc.Stream) -> None: - self._stream = stream - - async def read( - self, max_bytes: int, timeout: typing.Optional[float] = None - ) -> bytes: - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ReadTimeout, - trio.BrokenResourceError: ReadError, - trio.ClosedResourceError: ReadError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - data: bytes = await self._stream.receive_some(max_bytes=max_bytes) - return data - - async def write( - self, buffer: bytes, timeout: typing.Optional[float] = None - ) -> None: - if not buffer: - return - - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: WriteTimeout, - trio.BrokenResourceError: WriteError, - trio.ClosedResourceError: WriteError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - await self._stream.send_all(data=buffer) - - async def aclose(self) -> None: - await self._stream.aclose() - - async def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: typing.Optional[str] = None, - timeout: typing.Optional[float] = None, - ) -> AsyncNetworkStream: - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - } - ssl_stream = trio.SSLStream( - self._stream, - ssl_context=ssl_context, - server_hostname=server_hostname, - https_compatible=True, - server_side=False, - ) - with map_exceptions(exc_map): - try: - with trio.fail_after(timeout_or_inf): - await ssl_stream.do_handshake() - except Exception as exc: # pragma: nocover - await self.aclose() - raise exc - return TrioStream(ssl_stream) - - def get_extra_info(self, info: str) -> typing.Any: - if info == "ssl_object" and isinstance(self._stream, trio.SSLStream): - # Type checkers cannot see `_ssl_object` attribute because trio._ssl.SSLStream uses __getattr__/__setattr__. 
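One detail of the trio backend above that differs from the sync and anyio ones: trio has no per-call timeout arguments, so every optional `timeout` float is converted into a `fail_after()` cancel scope, with `float("inf")` standing in for "no limit". The pattern in isolation:

```python
import trio

async def fetch() -> bytes:
    # trio has no per-call timeouts; the backend above wraps each network
    # operation in a fail_after() cancel scope instead.
    with trio.fail_after(10):
        stream = await trio.open_tcp_stream("www.example.com", 80)
    async with stream:
        await stream.send_all(
            b"GET / HTTP/1.1\r\nHost: www.example.com\r\nConnection: close\r\n\r\n"
        )
        with trio.fail_after(10):
            return await stream.receive_some(65536)

print(trio.run(fetch))
```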
- # Tracked at https://github.com/python-trio/trio/issues/542 - return self._stream._ssl_object # type: ignore[attr-defined] - if info == "client_addr": - return self._get_socket_stream().socket.getsockname() - if info == "server_addr": - return self._get_socket_stream().socket.getpeername() - if info == "socket": - stream = self._stream - while isinstance(stream, trio.SSLStream): - stream = stream.transport_stream - assert isinstance(stream, trio.SocketStream) - return stream.socket - if info == "is_readable": - socket = self.get_extra_info("socket") - return socket.is_readable() - return None - - def _get_socket_stream(self) -> trio.SocketStream: - stream = self._stream - while isinstance(stream, trio.SSLStream): - stream = stream.transport_stream - assert isinstance(stream, trio.SocketStream) - return stream - - -class TrioBackend(AsyncNetworkBackend): - async def connect_tcp( - self, - host: str, - port: int, - timeout: typing.Optional[float] = None, - local_address: typing.Optional[str] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: - # By default for TCP sockets, trio enables TCP_NODELAY. - # https://trio.readthedocs.io/en/stable/reference-io.html#trio.SocketStream - if socket_options is None: - socket_options = [] # pragma: no cover - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - OSError: ConnectError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - stream: trio.abc.Stream = await trio.open_tcp_stream( - host=host, port=port, local_address=local_address - ) - for option in socket_options: - stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return TrioStream(stream) - - async def connect_unix_socket( - self, - path: str, - timeout: typing.Optional[float] = None, - socket_options: typing.Optional[typing.Iterable[SOCKET_OPTION]] = None, - ) -> AsyncNetworkStream: # pragma: nocover - if socket_options is None: - socket_options = [] - timeout_or_inf = float("inf") if timeout is None else timeout - exc_map: ExceptionMapping = { - trio.TooSlowError: ConnectTimeout, - trio.BrokenResourceError: ConnectError, - OSError: ConnectError, - } - with map_exceptions(exc_map): - with trio.fail_after(timeout_or_inf): - stream: trio.abc.Stream = await trio.open_unix_socket(path) - for option in socket_options: - stream.setsockopt(*option) # type: ignore[attr-defined] # pragma: no cover - return TrioStream(stream) - - async def sleep(self, seconds: float) -> None: - await trio.sleep(seconds) # pragma: nocover diff --git a/myenv/Lib/site-packages/httpcore/_exceptions.py b/myenv/Lib/site-packages/httpcore/_exceptions.py deleted file mode 100644 index 81e7fc6..0000000 --- a/myenv/Lib/site-packages/httpcore/_exceptions.py +++ /dev/null @@ -1,81 +0,0 @@ -import contextlib -from typing import Iterator, Mapping, Type - -ExceptionMapping = Mapping[Type[Exception], Type[Exception]] - - -@contextlib.contextmanager -def map_exceptions(map: ExceptionMapping) -> Iterator[None]: - try: - yield - except Exception as exc: # noqa: PIE786 - for from_exc, to_exc in map.items(): - if isinstance(exc, from_exc): - raise to_exc(exc) from exc - raise # pragma: nocover - - -class ConnectionNotAvailable(Exception): - pass - - -class ProxyError(Exception): - pass - - -class UnsupportedProtocol(Exception): - pass - - -class ProtocolError(Exception): - pass - - -class 
RemoteProtocolError(ProtocolError): - pass - - -class LocalProtocolError(ProtocolError): - pass - - -# Timeout errors - - -class TimeoutException(Exception): - pass - - -class PoolTimeout(TimeoutException): - pass - - -class ConnectTimeout(TimeoutException): - pass - - -class ReadTimeout(TimeoutException): - pass - - -class WriteTimeout(TimeoutException): - pass - - -# Network errors - - -class NetworkError(Exception): - pass - - -class ConnectError(NetworkError): - pass - - -class ReadError(NetworkError): - pass - - -class WriteError(NetworkError): - pass diff --git a/myenv/Lib/site-packages/httpcore/_models.py b/myenv/Lib/site-packages/httpcore/_models.py deleted file mode 100644 index dadee79..0000000 --- a/myenv/Lib/site-packages/httpcore/_models.py +++ /dev/null @@ -1,492 +0,0 @@ -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - Optional, - Sequence, - Tuple, - Union, -) -from urllib.parse import urlparse - -# Functions for typechecking... - - -HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] -HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] -HeaderTypes = Union[HeadersAsSequence, HeadersAsMapping, None] - -Extensions = MutableMapping[str, Any] - - -def enforce_bytes(value: Union[bytes, str], *, name: str) -> bytes: - """ - Any arguments that are ultimately represented as bytes can be specified - either as bytes or as strings. - - However we enforce that any string arguments must only contain characters in - the plain ASCII range. chr(0)...chr(127). If you need to use characters - outside that range then be precise, and use a byte-wise argument. - """ - if isinstance(value, str): - try: - return value.encode("ascii") - except UnicodeEncodeError: - raise TypeError(f"{name} strings may not include unicode characters.") - elif isinstance(value, bytes): - return value - - seen_type = type(value).__name__ - raise TypeError(f"{name} must be bytes or str, but got {seen_type}.") - - -def enforce_url(value: Union["URL", bytes, str], *, name: str) -> "URL": - """ - Type check for URL parameters. - """ - if isinstance(value, (bytes, str)): - return URL(value) - elif isinstance(value, URL): - return value - - seen_type = type(value).__name__ - raise TypeError(f"{name} must be a URL, bytes, or str, but got {seen_type}.") - - -def enforce_headers( - value: Union[HeadersAsMapping, HeadersAsSequence, None] = None, *, name: str -) -> List[Tuple[bytes, bytes]]: - """ - Convienence function that ensure all items in request or response headers - are either bytes or strings in the plain ASCII range. - """ - if value is None: - return [] - elif isinstance(value, Mapping): - return [ - ( - enforce_bytes(k, name="header name"), - enforce_bytes(v, name="header value"), - ) - for k, v in value.items() - ] - elif isinstance(value, Sequence): - return [ - ( - enforce_bytes(k, name="header name"), - enforce_bytes(v, name="header value"), - ) - for k, v in value - ] - - seen_type = type(value).__name__ - raise TypeError( - f"{name} must be a mapping or sequence of two-tuples, but got {seen_type}." 
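The `map_exceptions` helper at the top of `_exceptions.py` above is the seam between backend errors and the public hierarchy: each I/O call runs inside it with a mapping such as `{socket.timeout: ReadTimeout, OSError: ReadError}`. A self-contained sketch of the same pattern, with the exception classes redefined locally so it runs on its own:

```python
import contextlib
import socket
from typing import Iterator, Mapping, Type

class ConnectError(Exception): ...
class ConnectTimeout(Exception): ...

@contextlib.contextmanager
def map_exceptions(map: Mapping[Type[Exception], Type[Exception]]) -> Iterator[None]:
    # Same shape as the helper above: re-raise a recognised error as its
    # mapped public type, chaining the original; let anything else bubble up.
    try:
        yield
    except Exception as exc:
        for from_exc, to_exc in map.items():
            if isinstance(exc, from_exc):
                raise to_exc(exc) from exc
        raise

# socket.timeout is listed before its OSError base class, so order matters.
with map_exceptions({socket.timeout: ConnectTimeout, OSError: ConnectError}):
    socket.create_connection(("www.example.com", 80), timeout=10).close()
```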
- ) - - -def enforce_stream( - value: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None], *, name: str -) -> Union[Iterable[bytes], AsyncIterable[bytes]]: - if value is None: - return ByteStream(b"") - elif isinstance(value, bytes): - return ByteStream(value) - return value - - -# * https://tools.ietf.org/html/rfc3986#section-3.2.3 -# * https://url.spec.whatwg.org/#url-miscellaneous -# * https://url.spec.whatwg.org/#scheme-state -DEFAULT_PORTS = { - b"ftp": 21, - b"http": 80, - b"https": 443, - b"ws": 80, - b"wss": 443, -} - - -def include_request_headers( - headers: List[Tuple[bytes, bytes]], - *, - url: "URL", - content: Union[None, bytes, Iterable[bytes], AsyncIterable[bytes]], -) -> List[Tuple[bytes, bytes]]: - headers_set = set(k.lower() for k, v in headers) - - if b"host" not in headers_set: - default_port = DEFAULT_PORTS.get(url.scheme) - if url.port is None or url.port == default_port: - header_value = url.host - else: - header_value = b"%b:%d" % (url.host, url.port) - headers = [(b"Host", header_value)] + headers - - if ( - content is not None - and b"content-length" not in headers_set - and b"transfer-encoding" not in headers_set - ): - if isinstance(content, bytes): - content_length = str(len(content)).encode("ascii") - headers += [(b"Content-Length", content_length)] - else: - headers += [(b"Transfer-Encoding", b"chunked")] # pragma: nocover - - return headers - - -# Interfaces for byte streams... - - -class ByteStream: - """ - A container for non-streaming content, and that supports both sync and async - stream iteration. - """ - - def __init__(self, content: bytes) -> None: - self._content = content - - def __iter__(self) -> Iterator[bytes]: - yield self._content - - async def __aiter__(self) -> AsyncIterator[bytes]: - yield self._content - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{len(self._content)} bytes]>" - - -class Origin: - def __init__(self, scheme: bytes, host: bytes, port: int) -> None: - self.scheme = scheme - self.host = host - self.port = port - - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, Origin) - and self.scheme == other.scheme - and self.host == other.host - and self.port == other.port - ) - - def __str__(self) -> str: - scheme = self.scheme.decode("ascii") - host = self.host.decode("ascii") - port = str(self.port) - return f"{scheme}://{host}:{port}" - - -class URL: - """ - Represents the URL against which an HTTP request may be made. - - The URL may either be specified as a plain string, for convienence: - - ```python - url = httpcore.URL("https://www.example.com/") - ``` - - Or be constructed with explicitily pre-parsed components: - - ```python - url = httpcore.URL(scheme=b'https', host=b'www.example.com', port=None, target=b'/') - ``` - - Using this second more explicit style allows integrations that are using - `httpcore` to pass through URLs that have already been parsed in order to use - libraries such as `rfc-3986` rather than relying on the stdlib. It also ensures - that URL parsing is treated identically at both the networking level and at any - higher layers of abstraction. - - The four components are important here, as they allow the URL to be precisely - specified in a pre-parsed format. They also allow certain types of request to - be created that could not otherwise be expressed. - - For example, an HTTP request to `http://www.example.com/` forwarded via a proxy - at `http://localhost:8080`... 
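`include_request_headers()` above encodes two defaulting rules: `Host` omits the port when it matches the scheme default, and a request body gets `Content-Length` for bytes or `Transfer-Encoding: chunked` for an iterator, unless the caller already set framing headers. Exercising it directly (note this imports from a private module, purely to illustrate the deleted code):

```python
# Private-module import, shown only to exercise the function above.
from httpcore._models import URL, include_request_headers

url = URL("https://www.example.com/upload")
print(include_request_headers([], url=url, content=b"hello"))
# [(b'Host', b'www.example.com'), (b'Content-Length', b'5')]
# 443 is the https default, so Host omits it; a bytes body gets
# Content-Length, while an iterator body would get chunked encoding.

url = URL("http://www.example.com:8080/")
print(include_request_headers([], url=url, content=None))
# [(b'Host', b'www.example.com:8080')]  (non-default port is kept)
```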
- - ```python - # Constructs an HTTP request with a complete URL as the target: - # GET https://www.example.com/ HTTP/1.1 - url = httpcore.URL( - scheme=b'http', - host=b'localhost', - port=8080, - target=b'https://www.example.com/' - ) - request = httpcore.Request( - method="GET", - url=url - ) - ``` - - Another example is constructing an `OPTIONS *` request... - - ```python - # Constructs an 'OPTIONS *' HTTP request: - # OPTIONS * HTTP/1.1 - url = httpcore.URL(scheme=b'https', host=b'www.example.com', target=b'*') - request = httpcore.Request(method="OPTIONS", url=url) - ``` - - This kind of request is not possible to formulate with a URL string, - because the `/` delimiter is always used to demark the target from the - host/port portion of the URL. - - For convenience, string-like arguments may be specified either as strings or - as bytes. However, once a request is being issue over-the-wire, the URL - components are always ultimately required to be a bytewise representation. - - In order to avoid any ambiguity over character encodings, when strings are used - as arguments, they must be strictly limited to the ASCII range `chr(0)`-`chr(127)`. - If you require a bytewise representation that is outside this range you must - handle the character encoding directly, and pass a bytes instance. - """ - - def __init__( - self, - url: Union[bytes, str] = "", - *, - scheme: Union[bytes, str] = b"", - host: Union[bytes, str] = b"", - port: Optional[int] = None, - target: Union[bytes, str] = b"", - ) -> None: - """ - Parameters: - url: The complete URL as a string or bytes. - scheme: The URL scheme as a string or bytes. - Typically either `"http"` or `"https"`. - host: The URL host as a string or bytes. Such as `"www.example.com"`. - port: The port to connect to. Either an integer or `None`. - target: The target of the HTTP request. Such as `"/items?search=red"`. - """ - if url: - parsed = urlparse(enforce_bytes(url, name="url")) - self.scheme = parsed.scheme - self.host = parsed.hostname or b"" - self.port = parsed.port - self.target = (parsed.path or b"/") + ( - b"?" + parsed.query if parsed.query else b"" - ) - else: - self.scheme = enforce_bytes(scheme, name="scheme") - self.host = enforce_bytes(host, name="host") - self.port = port - self.target = enforce_bytes(target, name="target") - - @property - def origin(self) -> Origin: - default_port = { - b"http": 80, - b"https": 443, - b"ws": 80, - b"wss": 443, - b"socks5": 1080, - }[self.scheme] - return Origin( - scheme=self.scheme, host=self.host, port=self.port or default_port - ) - - def __eq__(self, other: Any) -> bool: - return ( - isinstance(other, URL) - and other.scheme == self.scheme - and other.host == self.host - and other.port == self.port - and other.target == self.target - ) - - def __bytes__(self) -> bytes: - if self.port is None: - return b"%b://%b%b" % (self.scheme, self.host, self.target) - return b"%b://%b:%d%b" % (self.scheme, self.host, self.port, self.target) - - def __repr__(self) -> str: - return ( - f"{self.__class__.__name__}(scheme={self.scheme!r}, " - f"host={self.host!r}, port={self.port!r}, target={self.target!r})" - ) - - -class Request: - """ - An HTTP request. - """ - - def __init__( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> None: - """ - Parameters: - method: The HTTP request method, either as a string or bytes. 
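Following on from the docstring examples above, the parsed components and the `origin` property behave like so; default ports are only filled in at the origin level, never stored on the URL itself:

```python
from httpcore import URL

url = URL("https://www.example.com/items?search=red")
print(url.scheme, url.host, url.port, url.target)
# b'https' b'www.example.com' None b'/items?search=red'

# origin fills in the scheme's default port; the URL itself keeps None.
print(url.origin)   # https://www.example.com:443
print(bytes(url))   # b'https://www.example.com/items?search=red'
```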
- For example: `GET`. - url: The request URL, either as a `URL` instance, or as a string or bytes. - For example: `"https://www.example.com".` - headers: The HTTP request headers. - content: The content of the request body. - extensions: A dictionary of optional extra information included on - the request. Possible keys include `"timeout"`, and `"trace"`. - """ - self.method: bytes = enforce_bytes(method, name="method") - self.url: URL = enforce_url(url, name="url") - self.headers: List[Tuple[bytes, bytes]] = enforce_headers( - headers, name="headers" - ) - self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( - content, name="content" - ) - self.extensions = {} if extensions is None else extensions - - if "target" in self.extensions: - self.url = URL( - scheme=self.url.scheme, - host=self.url.host, - port=self.url.port, - target=self.extensions["target"], - ) - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.method!r}]>" - - -class Response: - """ - An HTTP response. - """ - - def __init__( - self, - status: int, - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterable[bytes], AsyncIterable[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> None: - """ - Parameters: - status: The HTTP status code of the response. For example `200`. - headers: The HTTP response headers. - content: The content of the response body. - extensions: A dictionary of optional extra information included on - the responseself.Possible keys include `"http_version"`, - `"reason_phrase"`, and `"network_stream"`. - """ - self.status: int = status - self.headers: List[Tuple[bytes, bytes]] = enforce_headers( - headers, name="headers" - ) - self.stream: Union[Iterable[bytes], AsyncIterable[bytes]] = enforce_stream( - content, name="content" - ) - self.extensions = {} if extensions is None else extensions - - self._stream_consumed = False - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - if isinstance(self.stream, Iterable): - raise RuntimeError( - "Attempted to access 'response.content' on a streaming response. " - "Call 'response.read()' first." - ) - else: - raise RuntimeError( - "Attempted to access 'response.content' on a streaming response. " - "Call 'await response.aread()' first." - ) - return self._content - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.status}]>" - - # Sync interface... - - def read(self) -> bytes: - if not isinstance(self.stream, Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to read an asynchronous response using 'response.read()'. " - "You should use 'await response.aread()' instead." - ) - if not hasattr(self, "_content"): - self._content = b"".join([part for part in self.iter_stream()]) - return self._content - - def iter_stream(self) -> Iterator[bytes]: - if not isinstance(self.stream, Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to stream an asynchronous response using 'for ... in " - "response.iter_stream()'. " - "You should use 'async for ... in response.aiter_stream()' instead." - ) - if self._stream_consumed: - raise RuntimeError( - "Attempted to call 'for ... in response.iter_stream()' more than once." - ) - self._stream_consumed = True - for chunk in self.stream: - yield chunk - - def close(self) -> None: - if not isinstance(self.stream, Iterable): # pragma: nocover - raise RuntimeError( - "Attempted to close an asynchronous response using 'response.close()'. 
" - "You should use 'await response.aclose()' instead." - ) - if hasattr(self.stream, "close"): - self.stream.close() - - # Async interface... - - async def aread(self) -> bytes: - if not isinstance(self.stream, AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to read an synchronous response using " - "'await response.aread()'. " - "You should use 'response.read()' instead." - ) - if not hasattr(self, "_content"): - self._content = b"".join([part async for part in self.aiter_stream()]) - return self._content - - async def aiter_stream(self) -> AsyncIterator[bytes]: - if not isinstance(self.stream, AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to stream an synchronous response using 'async for ... in " - "response.aiter_stream()'. " - "You should use 'for ... in response.iter_stream()' instead." - ) - if self._stream_consumed: - raise RuntimeError( - "Attempted to call 'async for ... in response.aiter_stream()' " - "more than once." - ) - self._stream_consumed = True - async for chunk in self.stream: - yield chunk - - async def aclose(self) -> None: - if not isinstance(self.stream, AsyncIterable): # pragma: nocover - raise RuntimeError( - "Attempted to close a synchronous response using " - "'await response.aclose()'. " - "You should use 'response.close()' instead." - ) - if hasattr(self.stream, "aclose"): - await self.stream.aclose() diff --git a/myenv/Lib/site-packages/httpcore/_ssl.py b/myenv/Lib/site-packages/httpcore/_ssl.py deleted file mode 100644 index c99c5a6..0000000 --- a/myenv/Lib/site-packages/httpcore/_ssl.py +++ /dev/null @@ -1,9 +0,0 @@ -import ssl - -import certifi - - -def default_ssl_context() -> ssl.SSLContext: - context = ssl.create_default_context() - context.load_verify_locations(certifi.where()) - return context diff --git a/myenv/Lib/site-packages/httpcore/_sync/__init__.py b/myenv/Lib/site-packages/httpcore/_sync/__init__.py deleted file mode 100644 index b476d76..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/__init__.py +++ /dev/null @@ -1,39 +0,0 @@ -from .connection import HTTPConnection -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .http_proxy import HTTPProxy -from .interfaces import ConnectionInterface - -try: - from .http2 import HTTP2Connection -except ImportError: # pragma: nocover - - class HTTP2Connection: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use http2 support, but the `h2` package is not " - "installed. Use 'pip install httpcore[http2]'." - ) - - -try: - from .socks_proxy import SOCKSProxy -except ImportError: # pragma: nocover - - class SOCKSProxy: # type: ignore - def __init__(self, *args, **kwargs) -> None: # type: ignore - raise RuntimeError( - "Attempted to use SOCKS support, but the `socksio` package is not " - "installed. Use 'pip install httpcore[socks]'." 
- ) - - -__all__ = [ - "HTTPConnection", - "ConnectionPool", - "HTTPProxy", - "HTTP11Connection", - "HTTP2Connection", - "ConnectionInterface", - "SOCKSProxy", -] diff --git a/myenv/Lib/site-packages/httpcore/_sync/connection.py b/myenv/Lib/site-packages/httpcore/_sync/connection.py deleted file mode 100644 index c3890f3..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/connection.py +++ /dev/null @@ -1,220 +0,0 @@ -import itertools -import logging -import ssl -from types import TracebackType -from typing import Iterable, Iterator, Optional, Type - -from .._backends.sync import SyncBackend -from .._backends.base import SOCKET_OPTION, NetworkBackend, NetworkStream -from .._exceptions import ConnectError, ConnectTimeout -from .._models import Origin, Request, Response -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -RETRIES_BACKOFF_FACTOR = 0.5 # 0s, 0.5s, 1s, 2s, 4s, etc. - - -logger = logging.getLogger("httpcore.connection") - - -def exponential_backoff(factor: float) -> Iterator[float]: - """ - Generate a geometric sequence that has a ratio of 2 and starts with 0. - - For example: - - `factor = 2`: `0, 2, 4, 8, 16, 32, 64, ...` - - `factor = 3`: `0, 3, 6, 12, 24, 48, 96, ...` - """ - yield 0 - for n in itertools.count(): - yield factor * 2**n - - -class HTTPConnection(ConnectionInterface): - def __init__( - self, - origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._origin = origin - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend: NetworkBackend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._connection: Optional[ConnectionInterface] = None - self._connect_failed: bool = False - self._request_lock = Lock() - self._socket_options = socket_options - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection to {self._origin}" - ) - - try: - with self._request_lock: - if self._connection is None: - stream = self._connect(request) - - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except BaseException as exc: - self._connect_failed = True - raise exc - - return self._connection.handle_request(request) - - def _connect(self, request: Request) -> NetworkStream: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - retries_left = 
self._retries - delays = exponential_backoff(factor=RETRIES_BACKOFF_FACTOR) - - while True: - try: - if self._uds is None: - kwargs = { - "host": self._origin.host.decode("ascii"), - "port": self._origin.port, - "local_address": self._local_address, - "timeout": timeout, - "socket_options": self._socket_options, - } - with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - else: - kwargs = { - "path": self._uds, - "timeout": timeout, - "socket_options": self._socket_options, - } - with Trace( - "connect_unix_socket", logger, request, kwargs - ) as trace: - stream = self._network_backend.connect_unix_socket( - **kwargs - ) - trace.return_value = stream - - if self._origin.scheme in (b"https", b"wss"): - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - return stream - except (ConnectError, ConnectTimeout): - if retries_left <= 0: - raise - retries_left -= 1 - delay = next(delays) - with Trace("retry", logger, request, kwargs) as trace: - self._network_backend.sleep(delay) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def close(self) -> None: - if self._connection is not None: - with Trace("close", logger, None, {}): - self._connection.close() - - def is_available(self) -> bool: - if self._connection is None: - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
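- # For example, a sketch of direct usage (the origin, URL, and retry count
- # here are illustrative placeholders, not library defaults):
- #
- #     origin = Origin(scheme=b"https", host=b"example.com", port=443)
- #     with HTTPConnection(origin=origin, retries=2) as connection:
- #         response = connection.request("GET", "https://example.com/")
- #         print(response.status)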
- - def __enter__(self) -> "HTTPConnection": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self.close() diff --git a/myenv/Lib/site-packages/httpcore/_sync/connection_pool.py b/myenv/Lib/site-packages/httpcore/_sync/connection_pool.py deleted file mode 100644 index 01bec59..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/connection_pool.py +++ /dev/null @@ -1,380 +0,0 @@ -import ssl -import sys -from types import TracebackType -from typing import Iterable, Iterator, List, Optional, Type - -from .._backends.sync import SyncBackend -from .._backends.base import SOCKET_OPTION, NetworkBackend -from .._exceptions import ConnectionNotAvailable, UnsupportedProtocol -from .._models import Origin, Request, Response -from .._synchronization import Event, ShieldCancellation, ThreadLock -from .connection import HTTPConnection -from .interfaces import ConnectionInterface, RequestInterface - - -class PoolRequest: - def __init__(self, request: Request) -> None: - self.request = request - self.connection: Optional[ConnectionInterface] = None - self._connection_acquired = Event() - - def assign_to_connection( - self, connection: Optional[ConnectionInterface] - ) -> None: - self.connection = connection - self._connection_acquired.set() - - def clear_connection(self) -> None: - self.connection = None - self._connection_acquired = Event() - - def wait_for_connection( - self, timeout: Optional[float] = None - ) -> ConnectionInterface: - if self.connection is None: - self._connection_acquired.wait(timeout=timeout) - assert self.connection is not None - return self.connection - - def is_queued(self) -> bool: - return self.connection is None - - -class ConnectionPool(RequestInterface): - """ - A connection pool for making HTTP requests. - """ - - def __init__( - self, - ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish a - connection. - local_address: Local address to connect from. Can also be used to connect - using a particular address family. 
Using `local_address="0.0.0.0"` - will connect using an `AF_INET` address (IPv4), while using - `local_address="::"` will connect using an `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. - socket_options: Socket options to set on the TCP socket when - the connection is established. - """ - self._ssl_context = ssl_context - - self._max_connections = ( - sys.maxsize if max_connections is None else max_connections - ) - self._max_keepalive_connections = ( - sys.maxsize - if max_keepalive_connections is None - else max_keepalive_connections - ) - self._max_keepalive_connections = min( - self._max_connections, self._max_keepalive_connections - ) - - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._retries = retries - self._local_address = local_address - self._uds = uds - - self._network_backend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._socket_options = socket_options - - # The mutable state on a connection pool is the queue of incoming requests, - # and the set of connections that are servicing those requests. - self._connections: List[ConnectionInterface] = [] - self._requests: List[PoolRequest] = [] - - # We only mutate the state of the connection pool within an 'optional_thread_lock' - # context. This holds a threading lock unless we're running in async mode, - # in which case it is a no-op. - self._optional_thread_lock = ThreadLock() - - def create_connection(self, origin: Origin) -> ConnectionInterface: - return HTTPConnection( - origin=origin, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - retries=self._retries, - local_address=self._local_address, - uds=self._uds, - network_backend=self._network_backend, - socket_options=self._socket_options, - ) - - @property - def connections(self) -> List[ConnectionInterface]: - """ - Return a list of the connections currently in the pool. - - For example: - - ```python - >>> pool.connections - [ - <HTTPConnection ['https://example.com:443', HTTP/1.1, ACTIVE, Request Count: 6]>, - <HTTPConnection ['https://example.com:443', HTTP/1.1, IDLE, Request Count: 9]>, - <HTTPConnection ['http://example.com:80', HTTP/1.1, IDLE, Request Count: 1]>, - ] - ``` - """ - return list(self._connections) - - def handle_request(self, request: Request) -> Response: - """ - Send an HTTP request, and return an HTTP response. - - This is the core implementation that is called into by `.request()` or `.stream()`. - """ - scheme = request.url.scheme.decode() - if scheme == "": - raise UnsupportedProtocol( - "Request URL is missing an 'http://' or 'https://' protocol." - ) - if scheme not in ("http", "https", "ws", "wss"): - raise UnsupportedProtocol( - f"Request URL has an unsupported protocol '{scheme}://'." - ) - - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("pool", None) - - with self._optional_thread_lock: - # Add the incoming request to our request queue. - pool_request = PoolRequest(request) - self._requests.append(pool_request) - - try: - while True: - with self._optional_thread_lock: - # Assign incoming requests to available connections, - # closing or creating new connections as required. - closing = self._assign_requests_to_connections() - self._close_connections(closing) - - # Wait until this request has an assigned connection. - connection = pool_request.wait_for_connection(timeout=timeout) - - try: - # Send the request on the assigned connection. 
- response = connection.handle_request( - pool_request.request - ) - except ConnectionNotAvailable: - # In some cases a connection may initially be available to - # handle a request, but then become unavailable. - # - # In this case we clear the connection and try again. - pool_request.clear_connection() - else: - break # pragma: nocover - - except BaseException as exc: - with self._optional_thread_lock: - # For any exception or cancellation we remove the request from - # the queue, and then re-assign requests to connections. - self._requests.remove(pool_request) - closing = self._assign_requests_to_connections() - - self._close_connections(closing) - raise exc from None - - # Return the response. Note that in this case we still have to manage - # the point at which the response is closed. - assert isinstance(response.stream, Iterable) - return Response( - status=response.status, - headers=response.headers, - content=PoolByteStream( - stream=response.stream, pool_request=pool_request, pool=self - ), - extensions=response.extensions, - ) - - def _assign_requests_to_connections(self) -> List[ConnectionInterface]: - """ - Manage the state of the connection pool, assigning incoming - requests to connections as available. - - Called whenever a request is added to or removed from the pool. - - Any closing connections are returned, allowing the I/O for closing - those connections to be handled separately. - """ - closing_connections = [] - - # First we handle cleaning up any connections that are closed, - # have expired their keep-alive, or surplus idle connections. - for connection in list(self._connections): - if connection.is_closed(): - # log: "removing closed connection" - self._connections.remove(connection) - elif connection.has_expired(): - # log: "closing expired connection" - self._connections.remove(connection) - closing_connections.append(connection) - elif ( - connection.is_idle() - and len([connection.is_idle() for connection in self._connections]) - > self._max_keepalive_connections - ): - # log: "closing idle connection" - self._connections.remove(connection) - closing_connections.append(connection) - - # Assign queued requests to connections. - queued_requests = [request for request in self._requests if request.is_queued()] - for pool_request in queued_requests: - origin = pool_request.request.url.origin - available_connections = [ - connection - for connection in self._connections - if connection.can_handle_request(origin) and connection.is_available() - ] - idle_connections = [ - connection for connection in self._connections if connection.is_idle() - ] - - # There are three cases for how we may be able to handle the request: - # - # 1. There is an existing connection that can handle the request. - # 2. We can create a new connection to handle the request. - # 3. We can close an idle connection and then create a new connection - # to handle the request. 
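- # For example (hypothetical numbers): with max_connections=10, a request
- # for an origin that already has an available connection reuses it (case 1);
- # with fewer than 10 connections open, a new connection is created (case 2);
- # at the limit, an idle connection is evicted and replaced (case 3). If none
- # of these apply, the request remains queued until a connection frees up.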
- if available_connections: - # log: "reusing existing connection" - connection = available_connections[0] - pool_request.assign_to_connection(connection) - elif len(self._connections) < self._max_connections: - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - elif idle_connections: - # log: "closing idle connection" - connection = idle_connections[0] - self._connections.remove(connection) - closing_connections.append(connection) - # log: "creating new connection" - connection = self.create_connection(origin) - self._connections.append(connection) - pool_request.assign_to_connection(connection) - - return closing_connections - - def _close_connections(self, closing: List[ConnectionInterface]) -> None: - # Close connections which have been removed from the pool. - with ShieldCancellation(): - for connection in closing: - connection.close() - - def close(self) -> None: - # Explicitly close the connection pool. - # Clears all existing requests and connections. - with self._optional_thread_lock: - closing_connections = list(self._connections) - self._connections = [] - self._close_connections(closing_connections) - - def __enter__(self) -> "ConnectionPool": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self.close() - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - with self._optional_thread_lock: - request_is_queued = [request.is_queued() for request in self._requests] - connection_is_idle = [ - connection.is_idle() for connection in self._connections - ] - - num_active_requests = request_is_queued.count(False) - num_queued_requests = request_is_queued.count(True) - num_active_connections = connection_is_idle.count(False) - num_idle_connections = connection_is_idle.count(True) - - requests_info = ( - f"Requests: {num_active_requests} active, {num_queued_requests} queued" - ) - connection_info = ( - f"Connections: {num_active_connections} active, {num_idle_connections} idle" - ) - - return f"<{class_name} [{requests_info} | {connection_info}]>" - - -class PoolByteStream: - def __init__( - self, - stream: Iterable[bytes], - pool_request: PoolRequest, - pool: ConnectionPool, - ) -> None: - self._stream = stream - self._pool_request = pool_request - self._pool = pool - self._closed = False - - def __iter__(self) -> Iterator[bytes]: - try: - for part in self._stream: - yield part - except BaseException as exc: - self.close() - raise exc from None - - def close(self) -> None: - if not self._closed: - self._closed = True - with ShieldCancellation(): - if hasattr(self._stream, "close"): - self._stream.close() - - with self._pool._optional_thread_lock: - self._pool._requests.remove(self._pool_request) - closing = self._pool._assign_requests_to_connections() - - self._pool._close_connections(closing) diff --git a/myenv/Lib/site-packages/httpcore/_sync/http11.py b/myenv/Lib/site-packages/httpcore/_sync/http11.py deleted file mode 100644 index a74ff8e..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/http11.py +++ /dev/null @@ -1,386 +0,0 @@ -import enum -import logging -import ssl -import time -from types import TracebackType -from typing import ( - Any, - Iterable, - Iterator, - List, - Optional, - Tuple, - Type, - Union, -) - -import h11 - -from .._backends.base import NetworkStream -from .._exceptions import ( - 
ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, - WriteError, - map_exceptions, -) -from .._models import Origin, Request, Response -from .._synchronization import Lock, ShieldCancellation -from .._trace import Trace -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.http11") - - -# A subset of `h11.Event` types supported by `_send_event` -H11SendEvent = Union[ - h11.Request, - h11.Data, - h11.EndOfMessage, -] - - -class HTTPConnectionState(enum.IntEnum): - NEW = 0 - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class HTTP11Connection(ConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - MAX_INCOMPLETE_EVENT_SIZE = 100 * 1024 - - def __init__( - self, - origin: Origin, - stream: NetworkStream, - keepalive_expiry: Optional[float] = None, - ) -> None: - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: Optional[float] = keepalive_expiry - self._expire_at: Optional[float] = None - self._state = HTTPConnectionState.NEW - self._state_lock = Lock() - self._request_count = 0 - self._h11_state = h11.Connection( - our_role=h11.CLIENT, - max_incomplete_event_size=self.MAX_INCOMPLETE_EVENT_SIZE, - ) - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - with self._state_lock: - if self._state in (HTTPConnectionState.NEW, HTTPConnectionState.IDLE): - self._request_count += 1 - self._state = HTTPConnectionState.ACTIVE - self._expire_at = None - else: - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request} - try: - with Trace( - "send_request_headers", logger, request, kwargs - ) as trace: - self._send_request_headers(**kwargs) - with Trace("send_request_body", logger, request, kwargs) as trace: - self._send_request_body(**kwargs) - except WriteError: - # If we get a write error while we're writing the request, - # then we suppress this error and move on to attempting to - # read the response. Servers can sometimes close the request - # pre-emptively and then respond with a well-formed HTTP - # error response. - pass - - with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - ( - http_version, - status, - reason_phrase, - headers, - trailing_data, - ) = self._receive_response_headers(**kwargs) - trace.return_value = ( - http_version, - status, - reason_phrase, - headers, - ) - - network_stream = self._network_stream - - # CONNECT or Upgrade request - if (status == 101) or ( - (request.method == b"CONNECT") and (200 <= status < 300) - ): - network_stream = HTTP11UpgradeStream(network_stream, trailing_data) - - return Response( - status=status, - headers=headers, - content=HTTP11ConnectionByteStream(self, request), - extensions={ - "http_version": http_version, - "reason_phrase": reason_phrase, - "network_stream": network_stream, - }, - ) - except BaseException as exc: - with ShieldCancellation(): - with Trace("response_closed", logger, request) as trace: - self._response_closed() - raise exc - - # Sending the request... 
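- # h11 models an outgoing request as a sequence of events, roughly
- # (a sketch; the method, target, and headers are placeholders):
- #
- #     h11.Request(method=b"GET", target=b"/", headers=[(b"Host", b"example.com")])
- #     h11.Data(data=b"...")   # zero or more, for the request body
- #     h11.EndOfMessage()
- #
- # Each event is serialized by the h11 state machine in _send_event() and
- # written to the underlying network stream.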
- - def _send_request_headers(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with map_exceptions({h11.LocalProtocolError: LocalProtocolError}): - event = h11.Request( - method=request.method, - target=request.url.target, - headers=request.headers, - ) - self._send_event(event, timeout=timeout) - - def _send_request_body(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - assert isinstance(request.stream, Iterable) - for chunk in request.stream: - event = h11.Data(data=chunk) - self._send_event(event, timeout=timeout) - - self._send_event(h11.EndOfMessage(), timeout=timeout) - - def _send_event( - self, event: h11.Event, timeout: Optional[float] = None - ) -> None: - bytes_to_send = self._h11_state.send(event) - if bytes_to_send is not None: - self._network_stream.write(bytes_to_send, timeout=timeout) - - # Receiving the response... - - def _receive_response_headers( - self, request: Request - ) -> Tuple[bytes, int, bytes, List[Tuple[bytes, bytes]], bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = self._receive_event(timeout=timeout) - if isinstance(event, h11.Response): - break - if ( - isinstance(event, h11.InformationalResponse) - and event.status_code == 101 - ): - break - - http_version = b"HTTP/" + event.http_version - - # h11 version 0.11+ supports a `raw_items` interface to get the - # raw header casing, rather than the enforced lowercase headers. - headers = event.headers.raw_items() - - trailing_data, _ = self._h11_state.trailing_data - - return http_version, event.status_code, event.reason, headers, trailing_data - - def _receive_response_body(self, request: Request) -> Iterator[bytes]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - while True: - event = self._receive_event(timeout=timeout) - if isinstance(event, h11.Data): - yield bytes(event.data) - elif isinstance(event, (h11.EndOfMessage, h11.PAUSED)): - break - - def _receive_event( - self, timeout: Optional[float] = None - ) -> Union[h11.Event, Type[h11.PAUSED]]: - while True: - with map_exceptions({h11.RemoteProtocolError: RemoteProtocolError}): - event = self._h11_state.next_event() - - if event is h11.NEED_DATA: - data = self._network_stream.read( - self.READ_NUM_BYTES, timeout=timeout - ) - - # If we feed this case through h11 we'll raise an exception like: - # - # httpcore.RemoteProtocolError: can't handle event type - # ConnectionClosed when role=SERVER and state=SEND_RESPONSE - # - # Which is accurate, but not very informative from an end-user - # perspective. Instead we handle this case distinctly and treat - # it as a ConnectError. - if data == b"" and self._h11_state.their_state == h11.SEND_RESPONSE: - msg = "Server disconnected without sending a response." 
- raise RemoteProtocolError(msg) - - self._h11_state.receive_data(data) - else: - # mypy fails to narrow the type in the if statement above - return event # type: ignore[return-value] - - def _response_closed(self) -> None: - with self._state_lock: - if ( - self._h11_state.our_state is h11.DONE - and self._h11_state.their_state is h11.DONE - ): - self._state = HTTPConnectionState.IDLE - self._h11_state.start_next_cycle() - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - else: - self.close() - - # Once the connection is no longer required... - - def close(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._state = HTTPConnectionState.CLOSED - self._network_stream.close() - - # The ConnectionInterface methods provide information about the state of - # the connection, allowing for a connection pooling implementation to - # determine when to reuse and when to close the connection... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - # Note that HTTP/1.1 connections in the "NEW" state are not treated as - # being "available". The control flow which created the connection will - # be able to send an outgoing request, but the connection will not be - # acquired from the connection pool for any other request. - return self._state == HTTPConnectionState.IDLE - - def has_expired(self) -> bool: - now = time.monotonic() - keepalive_expired = self._expire_at is not None and now > self._expire_at - - # If the HTTP connection is idle but the socket is readable, then the - # only valid state is that the socket is about to return b"", indicating - # a server-initiated disconnect. - server_disconnected = ( - self._state == HTTPConnectionState.IDLE - and self._network_stream.get_extra_info("is_readable") - ) - - return keepalive_expired or server_disconnected - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/1.1, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
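- # For example, a sketch of wrapping an already-connected stream (the host,
- # port, and URL are placeholders; SyncBackend is from .._backends.sync):
- #
- #     stream = SyncBackend().connect_tcp("example.com", 80)
- #     origin = Origin(scheme=b"http", host=b"example.com", port=80)
- #     with HTTP11Connection(origin=origin, stream=stream) as connection:
- #         response = connection.request("GET", "http://example.com/")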
- - def __enter__(self) -> "HTTP11Connection": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self.close() - - -class HTTP11ConnectionByteStream: - def __init__(self, connection: HTTP11Connection, request: Request) -> None: - self._connection = connection - self._request = request - self._closed = False - - def __iter__(self) -> Iterator[bytes]: - kwargs = {"request": self._request} - try: - with Trace("receive_response_body", logger, self._request, kwargs): - for chunk in self._connection._receive_response_body(**kwargs): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with ShieldCancellation(): - self.close() - raise exc - - def close(self) -> None: - if not self._closed: - self._closed = True - with Trace("response_closed", logger, self._request): - self._connection._response_closed() - - -class HTTP11UpgradeStream(NetworkStream): - def __init__(self, stream: NetworkStream, leading_data: bytes) -> None: - self._stream = stream - self._leading_data = leading_data - - def read(self, max_bytes: int, timeout: Optional[float] = None) -> bytes: - if self._leading_data: - buffer = self._leading_data[:max_bytes] - self._leading_data = self._leading_data[max_bytes:] - return buffer - else: - return self._stream.read(max_bytes, timeout) - - def write(self, buffer: bytes, timeout: Optional[float] = None) -> None: - self._stream.write(buffer, timeout) - - def close(self) -> None: - self._stream.close() - - def start_tls( - self, - ssl_context: ssl.SSLContext, - server_hostname: Optional[str] = None, - timeout: Optional[float] = None, - ) -> NetworkStream: - return self._stream.start_tls(ssl_context, server_hostname, timeout) - - def get_extra_info(self, info: str) -> Any: - return self._stream.get_extra_info(info) diff --git a/myenv/Lib/site-packages/httpcore/_sync/http2.py b/myenv/Lib/site-packages/httpcore/_sync/http2.py deleted file mode 100644 index 1ee4bbb..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/http2.py +++ /dev/null @@ -1,589 +0,0 @@ -import enum -import logging -import time -import types -import typing - -import h2.config -import h2.connection -import h2.events -import h2.exceptions -import h2.settings - -from .._backends.base import NetworkStream -from .._exceptions import ( - ConnectionNotAvailable, - LocalProtocolError, - RemoteProtocolError, -) -from .._models import Origin, Request, Response -from .._synchronization import Lock, Semaphore, ShieldCancellation -from .._trace import Trace -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.http2") - - -def has_body_headers(request: Request) -> bool: - return any( - k.lower() == b"content-length" or k.lower() == b"transfer-encoding" - for k, v in request.headers - ) - - -class HTTPConnectionState(enum.IntEnum): - ACTIVE = 1 - IDLE = 2 - CLOSED = 3 - - -class HTTP2Connection(ConnectionInterface): - READ_NUM_BYTES = 64 * 1024 - CONFIG = h2.config.H2Configuration(validate_inbound_headers=False) - - def __init__( - self, - origin: Origin, - stream: NetworkStream, - keepalive_expiry: typing.Optional[float] = None, - ): - self._origin = origin - self._network_stream = stream - self._keepalive_expiry: typing.Optional[float] = keepalive_expiry - self._h2_state = 
h2.connection.H2Connection(config=self.CONFIG) - self._state = HTTPConnectionState.IDLE - self._expire_at: typing.Optional[float] = None - self._request_count = 0 - self._init_lock = Lock() - self._state_lock = Lock() - self._read_lock = Lock() - self._write_lock = Lock() - self._sent_connection_init = False - self._used_all_stream_ids = False - self._connection_error = False - - # Mapping from stream ID to response stream events. - self._events: typing.Dict[ - int, - typing.Union[ - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ], - ] = {} - - # Connection terminated events are stored as state since - # we need to handle them for all streams. - self._connection_terminated: typing.Optional[h2.events.ConnectionTerminated] = ( - None - ) - - self._read_exception: typing.Optional[Exception] = None - self._write_exception: typing.Optional[Exception] = None - - def handle_request(self, request: Request) -> Response: - if not self.can_handle_request(request.url.origin): - # This cannot occur in normal operation, since the connection pool - # will only send requests on connections that handle them. - # It's in place simply for resilience as a guard against incorrect - # usage, for anyone working directly with httpcore connections. - raise RuntimeError( - f"Attempted to send request to {request.url.origin} on connection " - f"to {self._origin}" - ) - - with self._state_lock: - if self._state in (HTTPConnectionState.ACTIVE, HTTPConnectionState.IDLE): - self._request_count += 1 - self._expire_at = None - self._state = HTTPConnectionState.ACTIVE - else: - raise ConnectionNotAvailable() - - with self._init_lock: - if not self._sent_connection_init: - try: - kwargs = {"request": request} - with Trace("send_connection_init", logger, request, kwargs): - self._send_connection_init(**kwargs) - except BaseException as exc: - with ShieldCancellation(): - self.close() - raise exc - - self._sent_connection_init = True - - # Initially start with just 1 until the remote server provides - # its max_concurrent_streams value - self._max_streams = 1 - - local_settings_max_streams = ( - self._h2_state.local_settings.max_concurrent_streams - ) - self._max_streams_semaphore = Semaphore(local_settings_max_streams) - - for _ in range(local_settings_max_streams - self._max_streams): - self._max_streams_semaphore.acquire() - - self._max_streams_semaphore.acquire() - - try: - stream_id = self._h2_state.get_next_available_stream_id() - self._events[stream_id] = [] - except h2.exceptions.NoAvailableStreamIDError: # pragma: nocover - self._used_all_stream_ids = True - self._request_count -= 1 - raise ConnectionNotAvailable() - - try: - kwargs = {"request": request, "stream_id": stream_id} - with Trace("send_request_headers", logger, request, kwargs): - self._send_request_headers(request=request, stream_id=stream_id) - with Trace("send_request_body", logger, request, kwargs): - self._send_request_body(request=request, stream_id=stream_id) - with Trace( - "receive_response_headers", logger, request, kwargs - ) as trace: - status, headers = self._receive_response( - request=request, stream_id=stream_id - ) - trace.return_value = (status, headers) - - return Response( - status=status, - headers=headers, - content=HTTP2ConnectionByteStream(self, request, stream_id=stream_id), - extensions={ - "http_version": b"HTTP/2", - "network_stream": self._network_stream, - "stream_id": stream_id, - }, - ) - except BaseException as exc: # noqa: PIE786 - with ShieldCancellation(): - 
kwargs = {"stream_id": stream_id} - with Trace("response_closed", logger, request, kwargs): - self._response_closed(stream_id=stream_id) - - if isinstance(exc, h2.exceptions.ProtocolError): - # One case where h2 can raise a protocol error is when a - # closed frame has been seen by the state machine. - # - # This happens when one stream is reading, and encounters - # a GOAWAY event. Other flows of control may then raise - # a protocol error at any point they interact with the 'h2_state'. - # - # In this case we'll have stored the event, and should raise - # it as a RemoteProtocolError. - if self._connection_terminated: # pragma: nocover - raise RemoteProtocolError(self._connection_terminated) - # If h2 raises a protocol error in some other state then we - # must somehow have made a protocol violation. - raise LocalProtocolError(exc) # pragma: nocover - - raise exc - - def _send_connection_init(self, request: Request) -> None: - """ - The HTTP/2 connection requires some initial setup before we can start - using individual request/response streams on it. - """ - # Need to set these manually here instead of manipulating via - # __setitem__() otherwise the H2Connection will emit SettingsUpdate - # frames in addition to sending the undesired defaults. - self._h2_state.local_settings = h2.settings.Settings( - client=True, - initial_values={ - # Disable PUSH_PROMISE frames from the server since we don't do anything - # with them for now. Maybe when we support caching? - h2.settings.SettingCodes.ENABLE_PUSH: 0, - # These two are taken from h2 for safe defaults - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100, - h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 65536, - }, - ) - - # Some websites (*cough* Yahoo *cough*) balk at this setting being - # present in the initial handshake since it's not defined in the original - # RFC despite the RFC mandating ignoring settings you don't know about. - del self._h2_state.local_settings[ - h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL - ] - - self._h2_state.initiate_connection() - self._h2_state.increment_flow_control_window(2**24) - self._write_outgoing_data(request) - - # Sending the request... - - def _send_request_headers(self, request: Request, stream_id: int) -> None: - """ - Send the request headers to a given stream ID. - """ - end_stream = not has_body_headers(request) - - # In HTTP/2 the ':authority' pseudo-header is used instead of 'Host'. - # In order to gracefully handle HTTP/1.1 and HTTP/2 we always require - # HTTP/1.1 style headers, and map them appropriately if we end up on - # an HTTP/2 connection. - authority = [v for k, v in request.headers if k.lower() == b"host"][0] - - headers = [ - (b":method", request.method), - (b":authority", authority), - (b":scheme", request.url.scheme), - (b":path", request.url.target), - ] + [ - (k.lower(), v) - for k, v in request.headers - if k.lower() - not in ( - b"host", - b"transfer-encoding", - ) - ] - - self._h2_state.send_headers(stream_id, headers, end_stream=end_stream) - self._h2_state.increment_flow_control_window(2**24, stream_id=stream_id) - self._write_outgoing_data(request) - - def _send_request_body(self, request: Request, stream_id: int) -> None: - """ - Iterate over the request body sending it to a given stream ID. 
- """ - if not has_body_headers(request): - return - - assert isinstance(request.stream, typing.Iterable) - for data in request.stream: - self._send_stream_data(request, stream_id, data) - self._send_end_stream(request, stream_id) - - def _send_stream_data( - self, request: Request, stream_id: int, data: bytes - ) -> None: - """ - Send a single chunk of data in one or more data frames. - """ - while data: - max_flow = self._wait_for_outgoing_flow(request, stream_id) - chunk_size = min(len(data), max_flow) - chunk, data = data[:chunk_size], data[chunk_size:] - self._h2_state.send_data(stream_id, chunk) - self._write_outgoing_data(request) - - def _send_end_stream(self, request: Request, stream_id: int) -> None: - """ - Send an empty data frame on on a given stream ID with the END_STREAM flag set. - """ - self._h2_state.end_stream(stream_id) - self._write_outgoing_data(request) - - # Receiving the response... - - def _receive_response( - self, request: Request, stream_id: int - ) -> typing.Tuple[int, typing.List[typing.Tuple[bytes, bytes]]]: - """ - Return the response status code and headers for a given stream ID. - """ - while True: - event = self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.ResponseReceived): - break - - status_code = 200 - headers = [] - for k, v in event.headers: - if k == b":status": - status_code = int(v.decode("ascii", errors="ignore")) - elif not k.startswith(b":"): - headers.append((k, v)) - - return (status_code, headers) - - def _receive_response_body( - self, request: Request, stream_id: int - ) -> typing.Iterator[bytes]: - """ - Iterator that returns the bytes of the response body for a given stream ID. - """ - while True: - event = self._receive_stream_event(request, stream_id) - if isinstance(event, h2.events.DataReceived): - amount = event.flow_controlled_length - self._h2_state.acknowledge_received_data(amount, stream_id) - self._write_outgoing_data(request) - yield event.data - elif isinstance(event, h2.events.StreamEnded): - break - - def _receive_stream_event( - self, request: Request, stream_id: int - ) -> typing.Union[ - h2.events.ResponseReceived, h2.events.DataReceived, h2.events.StreamEnded - ]: - """ - Return the next available event for a given stream ID. - - Will read more data from the network if required. - """ - while not self._events.get(stream_id): - self._receive_events(request, stream_id) - event = self._events[stream_id].pop(0) - if isinstance(event, h2.events.StreamReset): - raise RemoteProtocolError(event) - return event - - def _receive_events( - self, request: Request, stream_id: typing.Optional[int] = None - ) -> None: - """ - Read some data from the network until we see one or more events - for a given stream ID. - """ - with self._read_lock: - if self._connection_terminated is not None: - last_stream_id = self._connection_terminated.last_stream_id - if stream_id and last_stream_id and stream_id > last_stream_id: - self._request_count -= 1 - raise ConnectionNotAvailable() - raise RemoteProtocolError(self._connection_terminated) - - # This conditional is a bit icky. We don't want to block reading if we've - # actually got an event to return for a given stream. We need to do that - # check *within* the atomic read lock. Though it also need to be optional, - # because when we call it from `_wait_for_outgoing_flow` we *do* want to - # block until we've available flow control, event when we have events - # pending for the stream ID we're attempting to send on. 
- if stream_id is None or not self._events.get(stream_id): - events = self._read_incoming_data(request) - for event in events: - if isinstance(event, h2.events.RemoteSettingsChanged): - with Trace( - "receive_remote_settings", logger, request - ) as trace: - self._receive_remote_settings_change(event) - trace.return_value = event - - elif isinstance( - event, - ( - h2.events.ResponseReceived, - h2.events.DataReceived, - h2.events.StreamEnded, - h2.events.StreamReset, - ), - ): - if event.stream_id in self._events: - self._events[event.stream_id].append(event) - - elif isinstance(event, h2.events.ConnectionTerminated): - self._connection_terminated = event - - self._write_outgoing_data(request) - - def _receive_remote_settings_change(self, event: h2.events.Event) -> None: - max_concurrent_streams = event.changed_settings.get( - h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS - ) - if max_concurrent_streams: - new_max_streams = min( - max_concurrent_streams.new_value, - self._h2_state.local_settings.max_concurrent_streams, - ) - if new_max_streams and new_max_streams != self._max_streams: - while new_max_streams > self._max_streams: - self._max_streams_semaphore.release() - self._max_streams += 1 - while new_max_streams < self._max_streams: - self._max_streams_semaphore.acquire() - self._max_streams -= 1 - - def _response_closed(self, stream_id: int) -> None: - self._max_streams_semaphore.release() - del self._events[stream_id] - with self._state_lock: - if self._connection_terminated and not self._events: - self.close() - - elif self._state == HTTPConnectionState.ACTIVE and not self._events: - self._state = HTTPConnectionState.IDLE - if self._keepalive_expiry is not None: - now = time.monotonic() - self._expire_at = now + self._keepalive_expiry - if self._used_all_stream_ids: # pragma: nocover - self.close() - - def close(self) -> None: - # Note that this method unilaterally closes the connection, and does - # not have any kind of locking in place around it. - self._h2_state.close_connection() - self._state = HTTPConnectionState.CLOSED - self._network_stream.close() - - # Wrappers around network read/write operations... - - def _read_incoming_data( - self, request: Request - ) -> typing.List[h2.events.Event]: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("read", None) - - if self._read_exception is not None: - raise self._read_exception # pragma: nocover - - try: - data = self._network_stream.read(self.READ_NUM_BYTES, timeout) - if data == b"": - raise RemoteProtocolError("Server disconnected") - except Exception as exc: - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future reads. - # (For example, this means that a single read timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. 
- self._read_exception = exc - self._connection_error = True - raise exc - - events: typing.List[h2.events.Event] = self._h2_state.receive_data(data) - - return events - - def _write_outgoing_data(self, request: Request) -> None: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("write", None) - - with self._write_lock: - data_to_send = self._h2_state.data_to_send() - - if self._write_exception is not None: - raise self._write_exception # pragma: nocover - - try: - self._network_stream.write(data_to_send, timeout) - except Exception as exc: # pragma: nocover - # If we get a network error we should: - # - # 1. Save the exception and just raise it immediately on any future write. - # (For example, this means that a single write timeout or disconnect will - # immediately close all pending streams. Without requiring multiple - # sequential timeouts.) - # 2. Mark the connection as errored, so that we don't accept any other - # incoming requests. - self._write_exception = exc - self._connection_error = True - raise exc - - # Flow control... - - def _wait_for_outgoing_flow(self, request: Request, stream_id: int) -> int: - """ - Returns the maximum allowable outgoing flow for a given stream. - - If the allowable flow is zero, then waits on the network until - WindowUpdated frames have increased the flow rate. - https://tools.ietf.org/html/rfc7540#section-6.9 - """ - local_flow: int = self._h2_state.local_flow_control_window(stream_id) - max_frame_size: int = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - while flow == 0: - self._receive_events(request) - local_flow = self._h2_state.local_flow_control_window(stream_id) - max_frame_size = self._h2_state.max_outbound_frame_size - flow = min(local_flow, max_frame_size) - return flow - - # Interface for connection pooling... - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._origin - - def is_available(self) -> bool: - return ( - self._state != HTTPConnectionState.CLOSED - and not self._connection_error - and not self._used_all_stream_ids - and not ( - self._h2_state.state_machine.state - == h2.connection.ConnectionState.CLOSED - ) - ) - - def has_expired(self) -> bool: - now = time.monotonic() - return self._expire_at is not None and now > self._expire_at - - def is_idle(self) -> bool: - return self._state == HTTPConnectionState.IDLE - - def is_closed(self) -> bool: - return self._state == HTTPConnectionState.CLOSED - - def info(self) -> str: - origin = str(self._origin) - return ( - f"{origin!r}, HTTP/2, {self._state.name}, " - f"Request Count: {self._request_count}" - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - origin = str(self._origin) - return ( - f"<{class_name} [{origin!r}, {self._state.name}, " - f"Request Count: {self._request_count}]>" - ) - - # These context managers are not used in the standard flow, but are - # useful for testing or working with connection instances directly. 
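- # For example, a sketch of direct usage over an already-established TLS
- # stream that has negotiated "h2" via ALPN (`tls_stream`, the host, and the
- # URL here are placeholders):
- #
- #     origin = Origin(scheme=b"https", host=b"example.com", port=443)
- #     with HTTP2Connection(origin=origin, stream=tls_stream) as connection:
- #         response = connection.request("GET", "https://example.com/")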
- - def __enter__(self) -> "HTTP2Connection": - return self - - def __exit__( - self, - exc_type: typing.Optional[typing.Type[BaseException]] = None, - exc_value: typing.Optional[BaseException] = None, - traceback: typing.Optional[types.TracebackType] = None, - ) -> None: - self.close() - - -class HTTP2ConnectionByteStream: - def __init__( - self, connection: HTTP2Connection, request: Request, stream_id: int - ) -> None: - self._connection = connection - self._request = request - self._stream_id = stream_id - self._closed = False - - def __iter__(self) -> typing.Iterator[bytes]: - kwargs = {"request": self._request, "stream_id": self._stream_id} - try: - with Trace("receive_response_body", logger, self._request, kwargs): - for chunk in self._connection._receive_response_body( - request=self._request, stream_id=self._stream_id - ): - yield chunk - except BaseException as exc: - # If we get an exception while streaming the response, - # we want to close the response (and possibly the connection) - # before raising that exception. - with ShieldCancellation(): - self.close() - raise exc - - def close(self) -> None: - if not self._closed: - self._closed = True - kwargs = {"stream_id": self._stream_id} - with Trace("response_closed", logger, self._request, kwargs): - self._connection._response_closed(stream_id=self._stream_id) diff --git a/myenv/Lib/site-packages/httpcore/_sync/http_proxy.py b/myenv/Lib/site-packages/httpcore/_sync/http_proxy.py deleted file mode 100644 index 6acac9a..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/http_proxy.py +++ /dev/null @@ -1,368 +0,0 @@ -import logging -import ssl -from base64 import b64encode -from typing import Iterable, List, Mapping, Optional, Sequence, Tuple, Union - -from .._backends.base import SOCKET_OPTION, NetworkBackend -from .._exceptions import ProxyError -from .._models import ( - URL, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, -) -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .connection import HTTPConnection -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -HeadersAsSequence = Sequence[Tuple[Union[bytes, str], Union[bytes, str]]] -HeadersAsMapping = Mapping[Union[bytes, str], Union[bytes, str]] - - -logger = logging.getLogger("httpcore.proxy") - - -def merge_headers( - default_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - override_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, -) -> List[Tuple[bytes, bytes]]: - """ - Append default_headers and override_headers, de-duplicating if a key exists - in both cases. - """ - default_headers = [] if default_headers is None else list(default_headers) - override_headers = [] if override_headers is None else list(override_headers) - has_override = set(key.lower() for key, value in override_headers) - default_headers = [ - (key, value) - for key, value in default_headers - if key.lower() not in has_override - ] - return default_headers + override_headers - - -def build_auth_header(username: bytes, password: bytes) -> bytes: - userpass = username + b":" + password - return b"Basic " + b64encode(userpass) - - -class HTTPProxy(ConnectionPool): - """ - A connection pool that sends requests via an HTTP proxy. 
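- 
-     For example, a usage sketch (the proxy address and target URL are
-     placeholders, not defaults):
- 
-     ```python
-     proxy = HTTPProxy(proxy_url="http://127.0.0.1:8080/")
-     response = proxy.request("GET", "https://example.com/")
-     ```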
- """ - - def __init__( - self, - proxy_url: Union[URL, bytes, str], - proxy_auth: Optional[Tuple[Union[bytes, str], Union[bytes, str]]] = None, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - max_connections: Optional[int] = 10, - max_keepalive_connections: Optional[int] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - local_address: Optional[str] = None, - uds: Optional[str] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - proxy_auth: Any proxy authentication as a two-tuple of - (username, password). May be either bytes or ascii-only str. - proxy_headers: Any HTTP headers to use for the proxy requests. - For example `{"Proxy-Authorization": "Basic :"}`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - proxy_ssl_context: The same as `ssl_context`, but for a proxy server rather than a remote origin. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - local_address=local_address, - uds=uds, - socket_options=socket_options, - ) - - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if ( - self._proxy_url.scheme == b"http" and proxy_ssl_context is not None - ): # pragma: no cover - raise RuntimeError( - "The `proxy_ssl_context` argument is not allowed for the http scheme" - ) - - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - if proxy_auth is not None: - username = enforce_bytes(proxy_auth[0], name="proxy_auth") - password = enforce_bytes(proxy_auth[1], name="proxy_auth") - authorization = build_auth_header(username, password) - self._proxy_headers = [ - (b"Proxy-Authorization", authorization) - ] + self._proxy_headers - - def create_connection(self, origin: Origin) -> ConnectionInterface: - if origin.scheme == b"http": - return ForwardHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - keepalive_expiry=self._keepalive_expiry, - network_backend=self._network_backend, - proxy_ssl_context=self._proxy_ssl_context, - ) - return TunnelHTTPConnection( - proxy_origin=self._proxy_url.origin, - proxy_headers=self._proxy_headers, - remote_origin=origin, - ssl_context=self._ssl_context, - proxy_ssl_context=self._proxy_ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class ForwardHTTPConnection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_headers: Union[HeadersAsMapping, HeadersAsSequence, None] = None, - keepalive_expiry: Optional[float] = None, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - ) -> None: - self._connection = HTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._remote_origin = remote_origin - - def handle_request(self, request: Request) -> Response: - headers = merge_headers(self._proxy_headers, request.headers) - url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=bytes(request.url), - ) - proxy_request = Request( - method=request.method, - url=url, - headers=headers, - content=request.stream, - extensions=request.extensions, - ) - return self._connection.handle_request(proxy_request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - def close(self) -> None: - self._connection.close() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: 
- return f"<{self.__class__.__name__} [{self.info()}]>" - - -class TunnelHTTPConnection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - ssl_context: Optional[ssl.SSLContext] = None, - proxy_ssl_context: Optional[ssl.SSLContext] = None, - proxy_headers: Optional[Sequence[Tuple[bytes, bytes]]] = None, - keepalive_expiry: Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: Optional[NetworkBackend] = None, - socket_options: Optional[Iterable[SOCKET_OPTION]] = None, - ) -> None: - self._connection: ConnectionInterface = HTTPConnection( - origin=proxy_origin, - keepalive_expiry=keepalive_expiry, - network_backend=network_backend, - socket_options=socket_options, - ssl_context=proxy_ssl_context, - ) - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._ssl_context = ssl_context - self._proxy_ssl_context = proxy_ssl_context - self._proxy_headers = enforce_headers(proxy_headers, name="proxy_headers") - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - self._connect_lock = Lock() - self._connected = False - - def handle_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - timeout = timeouts.get("connect", None) - - with self._connect_lock: - if not self._connected: - target = b"%b:%d" % (self._remote_origin.host, self._remote_origin.port) - - connect_url = URL( - scheme=self._proxy_origin.scheme, - host=self._proxy_origin.host, - port=self._proxy_origin.port, - target=target, - ) - connect_headers = merge_headers( - [(b"Host", target), (b"Accept", b"*/*")], self._proxy_headers - ) - connect_request = Request( - method=b"CONNECT", - url=connect_url, - headers=connect_headers, - extensions=request.extensions, - ) - connect_response = self._connection.handle_request( - connect_request - ) - - if connect_response.status < 200 or connect_response.status > 299: - reason_bytes = connect_response.extensions.get("reason_phrase", b"") - reason_str = reason_bytes.decode("ascii", errors="ignore") - msg = "%d %s" % (connect_response.status, reason_str) - self._connection.close() - raise ProxyError(msg) - - stream = connect_response.extensions["network_stream"] - - # Upgrade the stream to SSL - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or (self._http2 and not self._http1): - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - - self._connected = True - return self._connection.handle_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - 
return origin == self._remote_origin - - def close(self) -> None: - self._connection.close() - - def info(self) -> str: - return self._connection.info() - - def is_available(self) -> bool: - return self._connection.is_available() - - def has_expired(self) -> bool: - return self._connection.has_expired() - - def is_idle(self) -> bool: - return self._connection.is_idle() - - def is_closed(self) -> bool: - return self._connection.is_closed() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/Lib/site-packages/httpcore/_sync/interfaces.py b/myenv/Lib/site-packages/httpcore/_sync/interfaces.py deleted file mode 100644 index 5e95be1..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/interfaces.py +++ /dev/null @@ -1,135 +0,0 @@ -from contextlib import contextmanager -from typing import Iterator, Optional, Union - -from .._models import ( - URL, - Extensions, - HeaderTypes, - Origin, - Request, - Response, - enforce_bytes, - enforce_headers, - enforce_url, - include_request_headers, -) - - -class RequestInterface: - def request( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> Response: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = self.handle_request(request) - try: - response.read() - finally: - response.close() - return response - - @contextmanager - def stream( - self, - method: Union[bytes, str], - url: Union[URL, bytes, str], - *, - headers: HeaderTypes = None, - content: Union[bytes, Iterator[bytes], None] = None, - extensions: Optional[Extensions] = None, - ) -> Iterator[Response]: - # Strict type checking on our parameters. - method = enforce_bytes(method, name="method") - url = enforce_url(url, name="url") - headers = enforce_headers(headers, name="headers") - - # Include Host header, and optionally Content-Length or Transfer-Encoding. - headers = include_request_headers(headers, url=url, content=content) - - request = Request( - method=method, - url=url, - headers=headers, - content=content, - extensions=extensions, - ) - response = self.handle_request(request) - try: - yield response - finally: - response.close() - - def handle_request(self, request: Request) -> Response: - raise NotImplementedError() # pragma: nocover - - -class ConnectionInterface(RequestInterface): - def close(self) -> None: - raise NotImplementedError() # pragma: nocover - - def info(self) -> str: - raise NotImplementedError() # pragma: nocover - - def can_handle_request(self, origin: Origin) -> bool: - raise NotImplementedError() # pragma: nocover - - def is_available(self) -> bool: - """ - Return `True` if the connection is currently able to accept an - outgoing request. - - An HTTP/1.1 connection will only be available if it is currently idle. - - An HTTP/2 connection will be available so long as the stream ID space is - not yet exhausted, and the connection is not in an error state. 
- - While the connection is being established we may not yet know if it is going - to result in an HTTP/1.1 or HTTP/2 connection. The connection should be - treated as being available, but might ultimately raise `NewConnectionRequired` - required exceptions if multiple requests are attempted over a connection - that ends up being established as HTTP/1.1. - """ - raise NotImplementedError() # pragma: nocover - - def has_expired(self) -> bool: - """ - Return `True` if the connection is in a state where it should be closed. - - This either means that the connection is idle and it has passed the - expiry time on its keep-alive, or that server has sent an EOF. - """ - raise NotImplementedError() # pragma: nocover - - def is_idle(self) -> bool: - """ - Return `True` if the connection is currently idle. - """ - raise NotImplementedError() # pragma: nocover - - def is_closed(self) -> bool: - """ - Return `True` if the connection has been closed. - - Used when a response is closed to determine if the connection may be - returned to the connection pool or not. - """ - raise NotImplementedError() # pragma: nocover diff --git a/myenv/Lib/site-packages/httpcore/_sync/socks_proxy.py b/myenv/Lib/site-packages/httpcore/_sync/socks_proxy.py deleted file mode 100644 index 502e4d7..0000000 --- a/myenv/Lib/site-packages/httpcore/_sync/socks_proxy.py +++ /dev/null @@ -1,342 +0,0 @@ -import logging -import ssl -import typing - -from socksio import socks5 - -from .._backends.sync import SyncBackend -from .._backends.base import NetworkBackend, NetworkStream -from .._exceptions import ConnectionNotAvailable, ProxyError -from .._models import URL, Origin, Request, Response, enforce_bytes, enforce_url -from .._ssl import default_ssl_context -from .._synchronization import Lock -from .._trace import Trace -from .connection_pool import ConnectionPool -from .http11 import HTTP11Connection -from .interfaces import ConnectionInterface - -logger = logging.getLogger("httpcore.socks") - - -AUTH_METHODS = { - b"\x00": "NO AUTHENTICATION REQUIRED", - b"\x01": "GSSAPI", - b"\x02": "USERNAME/PASSWORD", - b"\xff": "NO ACCEPTABLE METHODS", -} - -REPLY_CODES = { - b"\x00": "Succeeded", - b"\x01": "General SOCKS server failure", - b"\x02": "Connection not allowed by ruleset", - b"\x03": "Network unreachable", - b"\x04": "Host unreachable", - b"\x05": "Connection refused", - b"\x06": "TTL expired", - b"\x07": "Command not supported", - b"\x08": "Address type not supported", -} - - -def _init_socks5_connection( - stream: NetworkStream, - *, - host: bytes, - port: int, - auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, -) -> None: - conn = socks5.SOCKS5Connection() - - # Auth method request - auth_method = ( - socks5.SOCKS5AuthMethod.NO_AUTH_REQUIRED - if auth is None - else socks5.SOCKS5AuthMethod.USERNAME_PASSWORD - ) - conn.send(socks5.SOCKS5AuthMethodsRequest([auth_method])) - outgoing_bytes = conn.data_to_send() - stream.write(outgoing_bytes) - - # Auth method response - incoming_bytes = stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5AuthReply) - if response.method != auth_method: - requested = AUTH_METHODS.get(auth_method, "UNKNOWN") - responded = AUTH_METHODS.get(response.method, "UNKNOWN") - raise ProxyError( - f"Requested {requested} from proxy server, but got {responded}." 
- ) - - if response.method == socks5.SOCKS5AuthMethod.USERNAME_PASSWORD: - # Username/password request - assert auth is not None - username, password = auth - conn.send(socks5.SOCKS5UsernamePasswordRequest(username, password)) - outgoing_bytes = conn.data_to_send() - stream.write(outgoing_bytes) - - # Username/password response - incoming_bytes = stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5UsernamePasswordReply) - if not response.success: - raise ProxyError("Invalid username/password") - - # Connect request - conn.send( - socks5.SOCKS5CommandRequest.from_address( - socks5.SOCKS5Command.CONNECT, (host, port) - ) - ) - outgoing_bytes = conn.data_to_send() - stream.write(outgoing_bytes) - - # Connect response - incoming_bytes = stream.read(max_bytes=4096) - response = conn.receive_data(incoming_bytes) - assert isinstance(response, socks5.SOCKS5Reply) - if response.reply_code != socks5.SOCKS5ReplyCode.SUCCEEDED: - reply_code = REPLY_CODES.get(response.reply_code, "UNKOWN") - raise ProxyError(f"Proxy Server could not connect: {reply_code}.") - - -class SOCKSProxy(ConnectionPool): - """ - A connection pool that sends requests via an HTTP proxy. - """ - - def __init__( - self, - proxy_url: typing.Union[URL, bytes, str], - proxy_auth: typing.Optional[ - typing.Tuple[typing.Union[bytes, str], typing.Union[bytes, str]] - ] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - max_connections: typing.Optional[int] = 10, - max_keepalive_connections: typing.Optional[int] = None, - keepalive_expiry: typing.Optional[float] = None, - http1: bool = True, - http2: bool = False, - retries: int = 0, - network_backend: typing.Optional[NetworkBackend] = None, - ) -> None: - """ - A connection pool for making HTTP requests. - - Parameters: - proxy_url: The URL to use when connecting to the proxy server. - For example `"http://127.0.0.1:8080/"`. - ssl_context: An SSL context to use for verifying connections. - If not specified, the default `httpcore.default_ssl_context()` - will be used. - max_connections: The maximum number of concurrent HTTP connections that - the pool should allow. Any attempt to send a request on a pool that - would exceed this amount will block until a connection is available. - max_keepalive_connections: The maximum number of idle HTTP connections - that will be maintained in the pool. - keepalive_expiry: The duration in seconds that an idle HTTP connection - may be maintained for before being expired from the pool. - http1: A boolean indicating if HTTP/1.1 requests should be supported - by the connection pool. Defaults to True. - http2: A boolean indicating if HTTP/2 requests should be supported by - the connection pool. Defaults to False. - retries: The maximum number of retries when trying to establish - a connection. - local_address: Local address to connect from. Can also be used to - connect using a particular address family. Using - `local_address="0.0.0.0"` will connect using an `AF_INET` address - (IPv4), while using `local_address="::"` will connect using an - `AF_INET6` address (IPv6). - uds: Path to a Unix Domain Socket to use instead of TCP sockets. - network_backend: A backend instance to use for handling network I/O. 
- """ - super().__init__( - ssl_context=ssl_context, - max_connections=max_connections, - max_keepalive_connections=max_keepalive_connections, - keepalive_expiry=keepalive_expiry, - http1=http1, - http2=http2, - network_backend=network_backend, - retries=retries, - ) - self._ssl_context = ssl_context - self._proxy_url = enforce_url(proxy_url, name="proxy_url") - if proxy_auth is not None: - username, password = proxy_auth - username_bytes = enforce_bytes(username, name="proxy_auth") - password_bytes = enforce_bytes(password, name="proxy_auth") - self._proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = ( - username_bytes, - password_bytes, - ) - else: - self._proxy_auth = None - - def create_connection(self, origin: Origin) -> ConnectionInterface: - return Socks5Connection( - proxy_origin=self._proxy_url.origin, - remote_origin=origin, - proxy_auth=self._proxy_auth, - ssl_context=self._ssl_context, - keepalive_expiry=self._keepalive_expiry, - http1=self._http1, - http2=self._http2, - network_backend=self._network_backend, - ) - - -class Socks5Connection(ConnectionInterface): - def __init__( - self, - proxy_origin: Origin, - remote_origin: Origin, - proxy_auth: typing.Optional[typing.Tuple[bytes, bytes]] = None, - ssl_context: typing.Optional[ssl.SSLContext] = None, - keepalive_expiry: typing.Optional[float] = None, - http1: bool = True, - http2: bool = False, - network_backend: typing.Optional[NetworkBackend] = None, - ) -> None: - self._proxy_origin = proxy_origin - self._remote_origin = remote_origin - self._proxy_auth = proxy_auth - self._ssl_context = ssl_context - self._keepalive_expiry = keepalive_expiry - self._http1 = http1 - self._http2 = http2 - - self._network_backend: NetworkBackend = ( - SyncBackend() if network_backend is None else network_backend - ) - self._connect_lock = Lock() - self._connection: typing.Optional[ConnectionInterface] = None - self._connect_failed = False - - def handle_request(self, request: Request) -> Response: - timeouts = request.extensions.get("timeout", {}) - sni_hostname = request.extensions.get("sni_hostname", None) - timeout = timeouts.get("connect", None) - - with self._connect_lock: - if self._connection is None: - try: - # Connect to the proxy - kwargs = { - "host": self._proxy_origin.host.decode("ascii"), - "port": self._proxy_origin.port, - "timeout": timeout, - } - with Trace("connect_tcp", logger, request, kwargs) as trace: - stream = self._network_backend.connect_tcp(**kwargs) - trace.return_value = stream - - # Connect to the remote host using socks5 - kwargs = { - "stream": stream, - "host": self._remote_origin.host.decode("ascii"), - "port": self._remote_origin.port, - "auth": self._proxy_auth, - } - with Trace( - "setup_socks5_connection", logger, request, kwargs - ) as trace: - _init_socks5_connection(**kwargs) - trace.return_value = stream - - # Upgrade the stream to SSL - if self._remote_origin.scheme == b"https": - ssl_context = ( - default_ssl_context() - if self._ssl_context is None - else self._ssl_context - ) - alpn_protocols = ( - ["http/1.1", "h2"] if self._http2 else ["http/1.1"] - ) - ssl_context.set_alpn_protocols(alpn_protocols) - - kwargs = { - "ssl_context": ssl_context, - "server_hostname": sni_hostname - or self._remote_origin.host.decode("ascii"), - "timeout": timeout, - } - with Trace("start_tls", logger, request, kwargs) as trace: - stream = stream.start_tls(**kwargs) - trace.return_value = stream - - # Determine if we should be using HTTP/1.1 or HTTP/2 - ssl_object = stream.get_extra_info("ssl_object") - 
http2_negotiated = ( - ssl_object is not None - and ssl_object.selected_alpn_protocol() == "h2" - ) - - # Create the HTTP/1.1 or HTTP/2 connection - if http2_negotiated or ( - self._http2 and not self._http1 - ): # pragma: nocover - from .http2 import HTTP2Connection - - self._connection = HTTP2Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - else: - self._connection = HTTP11Connection( - origin=self._remote_origin, - stream=stream, - keepalive_expiry=self._keepalive_expiry, - ) - except Exception as exc: - self._connect_failed = True - raise exc - elif not self._connection.is_available(): # pragma: nocover - raise ConnectionNotAvailable() - - return self._connection.handle_request(request) - - def can_handle_request(self, origin: Origin) -> bool: - return origin == self._remote_origin - - def close(self) -> None: - if self._connection is not None: - self._connection.close() - - def is_available(self) -> bool: - if self._connection is None: # pragma: nocover - # If HTTP/2 support is enabled, and the resulting connection could - # end up as HTTP/2 then we should indicate the connection as being - # available to service multiple requests. - return ( - self._http2 - and (self._remote_origin.scheme == b"https" or not self._http1) - and not self._connect_failed - ) - return self._connection.is_available() - - def has_expired(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.has_expired() - - def is_idle(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_idle() - - def is_closed(self) -> bool: - if self._connection is None: # pragma: nocover - return self._connect_failed - return self._connection.is_closed() - - def info(self) -> str: - if self._connection is None: # pragma: nocover - return "CONNECTION FAILED" if self._connect_failed else "CONNECTING" - return self._connection.info() - - def __repr__(self) -> str: - return f"<{self.__class__.__name__} [{self.info()}]>" diff --git a/myenv/Lib/site-packages/httpcore/_synchronization.py b/myenv/Lib/site-packages/httpcore/_synchronization.py deleted file mode 100644 index 50cfefe..0000000 --- a/myenv/Lib/site-packages/httpcore/_synchronization.py +++ /dev/null @@ -1,317 +0,0 @@ -import threading -from types import TracebackType -from typing import Optional, Type - -from ._exceptions import ExceptionMapping, PoolTimeout, map_exceptions - -# Our async synchronization primatives use either 'anyio' or 'trio' depending -# on if they're running under asyncio or trio. - -try: - import trio -except (ImportError, NotImplementedError): # pragma: nocover - trio = None # type: ignore - -try: - import anyio -except ImportError: # pragma: nocover - anyio = None # type: ignore - - -def current_async_library() -> str: - # Determine if we're running under trio or asyncio. - # See https://sniffio.readthedocs.io/en/latest/ - try: - import sniffio - except ImportError: # pragma: nocover - environment = "asyncio" - else: - environment = sniffio.current_async_library() - - if environment not in ("asyncio", "trio"): # pragma: nocover - raise RuntimeError("Running under an unsupported async environment.") - - if environment == "asyncio" and anyio is None: # pragma: nocover - raise RuntimeError( - "Running with asyncio requires installation of 'httpcore[asyncio]'." 
- ) - - if environment == "trio" and trio is None: # pragma: nocover - raise RuntimeError( - "Running with trio requires installation of 'httpcore[trio]'." - ) - - return environment - - -class AsyncLock: - """ - This is a standard lock. - - In the sync case `Lock` provides thread locking. - In the async case `AsyncLock` provides async locking. - """ - - def __init__(self) -> None: - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a lock with the correct implementation. - """ - self._backend = current_async_library() - if self._backend == "trio": - self._trio_lock = trio.Lock() - elif self._backend == "asyncio": - self._anyio_lock = anyio.Lock() - - async def __aenter__(self) -> "AsyncLock": - if not self._backend: - self.setup() - - if self._backend == "trio": - await self._trio_lock.acquire() - elif self._backend == "asyncio": - await self._anyio_lock.acquire() - - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self._backend == "trio": - self._trio_lock.release() - elif self._backend == "asyncio": - self._anyio_lock.release() - - -class AsyncThreadLock: - """ - This is a threading-only lock for no-I/O contexts. - - In the sync case `ThreadLock` provides thread locking. - In the async case `AsyncThreadLock` is a no-op. - """ - - def __enter__(self) -> "AsyncThreadLock": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - pass - - -class AsyncEvent: - def __init__(self) -> None: - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a lock with the correct implementation. - """ - self._backend = current_async_library() - if self._backend == "trio": - self._trio_event = trio.Event() - elif self._backend == "asyncio": - self._anyio_event = anyio.Event() - - def set(self) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - self._trio_event.set() - elif self._backend == "asyncio": - self._anyio_event.set() - - async def wait(self, timeout: Optional[float] = None) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - trio_exc_map: ExceptionMapping = {trio.TooSlowError: PoolTimeout} - timeout_or_inf = float("inf") if timeout is None else timeout - with map_exceptions(trio_exc_map): - with trio.fail_after(timeout_or_inf): - await self._trio_event.wait() - elif self._backend == "asyncio": - anyio_exc_map: ExceptionMapping = {TimeoutError: PoolTimeout} - with map_exceptions(anyio_exc_map): - with anyio.fail_after(timeout): - await self._anyio_event.wait() - - -class AsyncSemaphore: - def __init__(self, bound: int) -> None: - self._bound = bound - self._backend = "" - - def setup(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a semaphore with the correct implementation. 
- """ - self._backend = current_async_library() - if self._backend == "trio": - self._trio_semaphore = trio.Semaphore( - initial_value=self._bound, max_value=self._bound - ) - elif self._backend == "asyncio": - self._anyio_semaphore = anyio.Semaphore( - initial_value=self._bound, max_value=self._bound - ) - - async def acquire(self) -> None: - if not self._backend: - self.setup() - - if self._backend == "trio": - await self._trio_semaphore.acquire() - elif self._backend == "asyncio": - await self._anyio_semaphore.acquire() - - async def release(self) -> None: - if self._backend == "trio": - self._trio_semaphore.release() - elif self._backend == "asyncio": - self._anyio_semaphore.release() - - -class AsyncShieldCancellation: - # For certain portions of our codebase where we're dealing with - # closing connections during exception handling we want to shield - # the operation from being cancelled. - # - # with AsyncShieldCancellation(): - # ... # clean-up operations, shielded from cancellation. - - def __init__(self) -> None: - """ - Detect if we're running under 'asyncio' or 'trio' and create - a shielded scope with the correct implementation. - """ - self._backend = current_async_library() - - if self._backend == "trio": - self._trio_shield = trio.CancelScope(shield=True) - elif self._backend == "asyncio": - self._anyio_shield = anyio.CancelScope(shield=True) - - def __enter__(self) -> "AsyncShieldCancellation": - if self._backend == "trio": - self._trio_shield.__enter__() - elif self._backend == "asyncio": - self._anyio_shield.__enter__() - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self._backend == "trio": - self._trio_shield.__exit__(exc_type, exc_value, traceback) - elif self._backend == "asyncio": - self._anyio_shield.__exit__(exc_type, exc_value, traceback) - - -# Our thread-based synchronization primitives... - - -class Lock: - """ - This is a standard lock. - - In the sync case `Lock` provides thread locking. - In the async case `AsyncLock` provides async locking. - """ - - def __init__(self) -> None: - self._lock = threading.Lock() - - def __enter__(self) -> "Lock": - self._lock.acquire() - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self._lock.release() - - -class ThreadLock: - """ - This is a threading-only lock for no-I/O contexts. - - In the sync case `ThreadLock` provides thread locking. - In the async case `AsyncThreadLock` is a no-op. 
- """ - - def __init__(self) -> None: - self._lock = threading.Lock() - - def __enter__(self) -> "ThreadLock": - self._lock.acquire() - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - self._lock.release() - - -class Event: - def __init__(self) -> None: - self._event = threading.Event() - - def set(self) -> None: - self._event.set() - - def wait(self, timeout: Optional[float] = None) -> None: - if timeout == float("inf"): # pragma: no cover - timeout = None - if not self._event.wait(timeout=timeout): - raise PoolTimeout() # pragma: nocover - - -class Semaphore: - def __init__(self, bound: int) -> None: - self._semaphore = threading.Semaphore(value=bound) - - def acquire(self) -> None: - self._semaphore.acquire() - - def release(self) -> None: - self._semaphore.release() - - -class ShieldCancellation: - # Thread-synchronous codebases don't support cancellation semantics. - # We have this class because we need to mirror the async and sync - # cases within our package, but it's just a no-op. - def __enter__(self) -> "ShieldCancellation": - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - pass diff --git a/myenv/Lib/site-packages/httpcore/_trace.py b/myenv/Lib/site-packages/httpcore/_trace.py deleted file mode 100644 index b122a53..0000000 --- a/myenv/Lib/site-packages/httpcore/_trace.py +++ /dev/null @@ -1,105 +0,0 @@ -import inspect -import logging -from types import TracebackType -from typing import Any, Dict, Optional, Type - -from ._models import Request - - -class Trace: - def __init__( - self, - name: str, - logger: logging.Logger, - request: Optional[Request] = None, - kwargs: Optional[Dict[str, Any]] = None, - ) -> None: - self.name = name - self.logger = logger - self.trace_extension = ( - None if request is None else request.extensions.get("trace") - ) - self.debug = self.logger.isEnabledFor(logging.DEBUG) - self.kwargs = kwargs or {} - self.return_value: Any = None - self.should_trace = self.debug or self.trace_extension is not None - self.prefix = self.logger.name.split(".")[-1] - - def trace(self, name: str, info: Dict[str, Any]) -> None: - if self.trace_extension is not None: - prefix_and_name = f"{self.prefix}.{name}" - ret = self.trace_extension(prefix_and_name, info) - if inspect.iscoroutine(ret): # pragma: no cover - raise TypeError( - "If you are using a synchronous interface, " - "the callback of the `trace` extension should " - "be a normal function instead of an asynchronous function." 
- ) - - if self.debug: - if not info or "return_value" in info and info["return_value"] is None: - message = name - else: - args = " ".join([f"{key}={value!r}" for key, value in info.items()]) - message = f"{name} {args}" - self.logger.debug(message) - - def __enter__(self) -> "Trace": - if self.should_trace: - info = self.kwargs - self.trace(f"{self.name}.started", info) - return self - - def __exit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self.should_trace: - if exc_value is None: - info = {"return_value": self.return_value} - self.trace(f"{self.name}.complete", info) - else: - info = {"exception": exc_value} - self.trace(f"{self.name}.failed", info) - - async def atrace(self, name: str, info: Dict[str, Any]) -> None: - if self.trace_extension is not None: - prefix_and_name = f"{self.prefix}.{name}" - coro = self.trace_extension(prefix_and_name, info) - if not inspect.iscoroutine(coro): # pragma: no cover - raise TypeError( - "If you're using an asynchronous interface, " - "the callback of the `trace` extension should " - "be an asynchronous function rather than a normal function." - ) - await coro - - if self.debug: - if not info or "return_value" in info and info["return_value"] is None: - message = name - else: - args = " ".join([f"{key}={value!r}" for key, value in info.items()]) - message = f"{name} {args}" - self.logger.debug(message) - - async def __aenter__(self) -> "Trace": - if self.should_trace: - info = self.kwargs - await self.atrace(f"{self.name}.started", info) - return self - - async def __aexit__( - self, - exc_type: Optional[Type[BaseException]] = None, - exc_value: Optional[BaseException] = None, - traceback: Optional[TracebackType] = None, - ) -> None: - if self.should_trace: - if exc_value is None: - info = {"return_value": self.return_value} - await self.atrace(f"{self.name}.complete", info) - else: - info = {"exception": exc_value} - await self.atrace(f"{self.name}.failed", info) diff --git a/myenv/Lib/site-packages/httpcore/_utils.py b/myenv/Lib/site-packages/httpcore/_utils.py deleted file mode 100644 index df5dea8..0000000 --- a/myenv/Lib/site-packages/httpcore/_utils.py +++ /dev/null @@ -1,36 +0,0 @@ -import select -import socket -import sys -import typing - - -def is_socket_readable(sock: typing.Optional[socket.socket]) -> bool: - """ - Return whether a socket, as identifed by its file descriptor, is readable. - "A socket is readable" means that the read buffer isn't empty, i.e. that calling - .recv() on it would immediately return some data. - """ - # NOTE: we want check for readability without actually attempting to read, because - # we don't want to block forever if it's not readable. - - # In the case that the socket no longer exists, or cannot return a file - # descriptor, we treat it as being readable, as if it the next read operation - # on it is ready to return the terminating `b""`. - sock_fd = None if sock is None else sock.fileno() - if sock_fd is None or sock_fd < 0: # pragma: nocover - return True - - # The implementation below was stolen from: - # https://github.com/python-trio/trio/blob/20ee2b1b7376db637435d80e266212a35837ddcc/trio/_socket.py#L471-L478 - # See also: https://github.com/encode/httpcore/pull/193#issuecomment-703129316 - - # Use select.select on Windows, and when poll is unavailable and select.poll - # everywhere else. (E.g. When eventlet is in use. 
See #327) - if ( - sys.platform == "win32" or getattr(select, "poll", None) is None - ): # pragma: nocover - rready, _, _ = select.select([sock_fd], [], [], 0) - return bool(rready) - p = select.poll() - p.register(sock_fd, select.POLLIN) - return bool(p.poll(0)) diff --git a/myenv/Lib/site-packages/httpcore/py.typed b/myenv/Lib/site-packages/httpcore/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/INSTALLER b/myenv/Lib/site-packages/httpx-0.27.2.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/METADATA b/myenv/Lib/site-packages/httpx-0.27.2.dist-info/METADATA deleted file mode 100644 index 4c6a080..0000000 --- a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/METADATA +++ /dev/null @@ -1,207 +0,0 @@ -Metadata-Version: 2.3 -Name: httpx -Version: 0.27.2 -Summary: The next generation HTTP client. -Project-URL: Changelog, https://github.com/encode/httpx/blob/master/CHANGELOG.md -Project-URL: Documentation, https://www.python-httpx.org -Project-URL: Homepage, https://github.com/encode/httpx -Project-URL: Source, https://github.com/encode/httpx -Author-email: Tom Christie -License-Expression: BSD-3-Clause -License-File: LICENSE.md -Classifier: Development Status :: 4 - Beta -Classifier: Environment :: Web Environment -Classifier: Framework :: AsyncIO -Classifier: Framework :: Trio -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Topic :: Internet :: WWW/HTTP -Requires-Python: >=3.8 -Requires-Dist: anyio -Requires-Dist: certifi -Requires-Dist: httpcore==1.* -Requires-Dist: idna -Requires-Dist: sniffio -Provides-Extra: brotli -Requires-Dist: brotli; (platform_python_implementation == 'CPython') and extra == 'brotli' -Requires-Dist: brotlicffi; (platform_python_implementation != 'CPython') and extra == 'brotli' -Provides-Extra: cli -Requires-Dist: click==8.*; extra == 'cli' -Requires-Dist: pygments==2.*; extra == 'cli' -Requires-Dist: rich<14,>=10; extra == 'cli' -Provides-Extra: http2 -Requires-Dist: h2<5,>=3; extra == 'http2' -Provides-Extra: socks -Requires-Dist: socksio==1.*; extra == 'socks' -Provides-Extra: zstd -Requires-Dist: zstandard>=0.18.0; extra == 'zstd' -Description-Content-Type: text/markdown - -

-[HTTPX logo]
-
-HTTPX - A next-generation HTTP client for Python.
-
-[badges: Test Suite, Package version]

- -HTTPX is a fully featured HTTP client library for Python 3. It includes **an integrated -command line client**, has support for both **HTTP/1.1 and HTTP/2**, and provides both **sync -and async APIs**. - ---- - -Install HTTPX using pip: - -```shell -pip install httpx -``` - -Now, let's get started: - -```pycon ->>> import httpx ->>> r = httpx.get('https://www.example.org/') ->>> r - ->>> r.status_code -200 ->>> r.headers['content-type'] -'text/html; charset=UTF-8' ->>> r.text -'\n\n\nExample Domain...' -``` - -Or, using the command-line client. - -```shell -pip install 'httpx[cli]' # The command line client is an optional dependency. -``` - -Which now allows us to use HTTPX directly from the command-line... - -

-    httpx --help
-
-Sending a request...
-
-    httpx http://httpbin.org/json
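
The quickstart above shows only the synchronous API; the README's claim of "sync and async APIs" deserves the other half. A minimal sketch of the async equivalent, reusing the same illustrative example.org URL (this block is an editorial addition, not part of the deleted file):

```python
import asyncio

import httpx


async def main() -> None:
    # AsyncClient mirrors the sync Client API; use it as an async
    # context manager so the connection pool is closed cleanly.
    async with httpx.AsyncClient() as client:
        r = await client.get("https://www.example.org/")
        print(r.status_code, r.headers["content-type"])


asyncio.run(main())
```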

- -## Features - -HTTPX builds on the well-established usability of `requests`, and gives you: - -* A broadly [requests-compatible API](https://www.python-httpx.org/compatibility/). -* An integrated command-line client. -* HTTP/1.1 [and HTTP/2 support](https://www.python-httpx.org/http2/). -* Standard synchronous interface, but with [async support if you need it](https://www.python-httpx.org/async/). -* Ability to make requests directly to [WSGI applications](https://www.python-httpx.org/advanced/transports/#wsgi-transport) or [ASGI applications](https://www.python-httpx.org/advanced/transports/#asgi-transport). -* Strict timeouts everywhere. -* Fully type annotated. -* 100% test coverage. - -Plus all the standard features of `requests`... - -* International Domains and URLs -* Keep-Alive & Connection Pooling -* Sessions with Cookie Persistence -* Browser-style SSL Verification -* Basic/Digest Authentication -* Elegant Key/Value Cookies -* Automatic Decompression -* Automatic Content Decoding -* Unicode Response Bodies -* Multipart File Uploads -* HTTP(S) Proxy Support -* Connection Timeouts -* Streaming Downloads -* .netrc Support -* Chunked Requests - -## Installation - -Install with pip: - -```shell -pip install httpx -``` - -Or, to include the optional HTTP/2 support, use: - -```shell -pip install httpx[http2] -``` - -HTTPX requires Python 3.8+. - -## Documentation - -Project documentation is available at [https://www.python-httpx.org/](https://www.python-httpx.org/). - -For a run-through of all the basics, head over to the [QuickStart](https://www.python-httpx.org/quickstart/). - -For more advanced topics, see the [Advanced Usage](https://www.python-httpx.org/advanced/) section, the [async support](https://www.python-httpx.org/async/) section, or the [HTTP/2](https://www.python-httpx.org/http2/) section. - -The [Developer Interface](https://www.python-httpx.org/api/) provides a comprehensive API reference. - -To find out about tools that integrate with HTTPX, see [Third Party Packages](https://www.python-httpx.org/third_party_packages/). - -## Contribute - -If you want to contribute with HTTPX check out the [Contributing Guide](https://www.python-httpx.org/contributing/) to learn how to start. - -## Dependencies - -The HTTPX project relies on these excellent libraries: - -* `httpcore` - The underlying transport implementation for `httpx`. - * `h11` - HTTP/1.1 support. -* `certifi` - SSL certificates. -* `idna` - Internationalized domain name support. -* `sniffio` - Async library autodetection. - -As well as these optional installs: - -* `h2` - HTTP/2 support. *(Optional, with `httpx[http2]`)* -* `socksio` - SOCKS proxy support. *(Optional, with `httpx[socks]`)* -* `rich` - Rich terminal support. *(Optional, with `httpx[cli]`)* -* `click` - Command line client support. *(Optional, with `httpx[cli]`)* -* `brotli` or `brotlicffi` - Decoding for "brotli" compressed responses. *(Optional, with `httpx[brotli]`)* -* `zstandard` - Decoding for "zstd" compressed responses. *(Optional, with `httpx[zstd]`)* - -A huge amount of credit is due to `requests` for the API layout that -much of this work follows, as well as to `urllib3` for plenty of design -inspiration around the lower-level networking details. - ---- - -
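
One point worth spelling out from the installation notes above: installing the `http2` extra only makes HTTP/2 *available* (it pulls in `h2`); the client still defaults to HTTP/1.1 until you opt in. A minimal sketch, assuming `httpx[http2]` is installed (again an editorial addition, not part of the deleted file):

```python
import httpx

# HTTP/2 is opt-in: pass http2=True explicitly, otherwise the
# client negotiates HTTP/1.1 even with the extra installed.
with httpx.Client(http2=True) as client:
    response = client.get("https://www.example.org/")
    # Reports the negotiated protocol, e.g. "HTTP/2" or "HTTP/1.1"
    # depending on what the server supports.
    print(response.http_version)
```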

-HTTPX is BSD licensed code.
-Designed & crafted with care.
-
-— 🦋 —

- -## Release Information - -### Fixed - -* Reintroduced supposedly-private `URLTypes` shortcut. (#2673) - - ---- - -[Full changelog](https://github.com/encode/httpx/blob/master/CHANGELOG.md) diff --git a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/RECORD b/myenv/Lib/site-packages/httpx-0.27.2.dist-info/RECORD deleted file mode 100644 index cd4c257..0000000 --- a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/RECORD +++ /dev/null @@ -1,56 +0,0 @@ -../../Scripts/httpx.exe,sha256=_XlxD6-206Vdv-Fhh7AxStm0bQ7OVawWsIPGHGOci3g,108435 -httpx-0.27.2.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -httpx-0.27.2.dist-info/METADATA,sha256=vt_qxvRfNlYTOeaZ0pHUdimRoq3pHYQCfHeR2ZIsMQE,7103 -httpx-0.27.2.dist-info/RECORD,, -httpx-0.27.2.dist-info/WHEEL,sha256=1yFddiXMmvYK7QYTqtRNtX66WJ0Mz8PYEiEUoOUUxRY,87 -httpx-0.27.2.dist-info/entry_points.txt,sha256=2lVkdQmxLA1pNMgSN2eV89o90HCZezhmNwsy6ryKDSA,37 -httpx-0.27.2.dist-info/licenses/LICENSE.md,sha256=TsWdVE8StfU5o6cW_TIaxYzNgDC0ZSIfLIgCAM3yjY0,1508 -httpx/__init__.py,sha256=CsaZe6yZj0rHg6322AWKWHGTMVr9txgEfD5P3_Rrz60,2171 -httpx/__pycache__/__init__.cpython-312.pyc,, -httpx/__pycache__/__version__.cpython-312.pyc,, -httpx/__pycache__/_api.cpython-312.pyc,, -httpx/__pycache__/_auth.cpython-312.pyc,, -httpx/__pycache__/_client.cpython-312.pyc,, -httpx/__pycache__/_compat.cpython-312.pyc,, -httpx/__pycache__/_config.cpython-312.pyc,, -httpx/__pycache__/_content.cpython-312.pyc,, -httpx/__pycache__/_decoders.cpython-312.pyc,, -httpx/__pycache__/_exceptions.cpython-312.pyc,, -httpx/__pycache__/_main.cpython-312.pyc,, -httpx/__pycache__/_models.cpython-312.pyc,, -httpx/__pycache__/_multipart.cpython-312.pyc,, -httpx/__pycache__/_status_codes.cpython-312.pyc,, -httpx/__pycache__/_types.cpython-312.pyc,, -httpx/__pycache__/_urlparse.cpython-312.pyc,, -httpx/__pycache__/_urls.cpython-312.pyc,, -httpx/__pycache__/_utils.cpython-312.pyc,, -httpx/__version__.py,sha256=jmvSKcTIJ9sXDdDB3XqJ_YgYJFCqEofEHgm5SeeZ3hk,108 -httpx/_api.py,sha256=t4s7Uc7FoQLyxLEWSkjYWYl_GkDCbMU_WRyOqbZz11E,13078 -httpx/_auth.py,sha256=Yr3QwaUSK17rGYx-7j-FdicFIzz4Y9FFV-1F4-7RXX4,11891 -httpx/_client.py,sha256=xKv340hcLfOPCnilexEh2gNg5BrEQqIS7_O0ZB9nxWw,68032 -httpx/_compat.py,sha256=r30ViaoqRerTF84IvzjA7k1L4144UWDLEiKFYW6F6U0,2258 -httpx/_config.py,sha256=BJZDSp3nRg7XHPhvP2vz7K_A9n4tefqQS4F-UmFLyXA,12259 -httpx/_content.py,sha256=0UsVVH6JwcQLt7STbZzS0GPh_13QSxKTOv39QaE7zck,8073 -httpx/_decoders.py,sha256=meUGWtnOoODJRAAGHTs9VdMhe3NplJg4D2qAJLj8d9c,11444 -httpx/_exceptions.py,sha256=bxW7fxzgVMAdNTbwT0Vnq04gJDW1_gI_GFiQPuMyjL0,8527 -httpx/_main.py,sha256=LcRXtGghiTux7yj0pGXQXx7PNfr3EHE3VcxBcCY4RcE,15635 -httpx/_models.py,sha256=yjVVRmy0VXhJngWbxZ8Pnn2Jpwr-22Zy_JPakfyRqWU,42372 -httpx/_multipart.py,sha256=CkS8cH5Nau1YrvizDSCRhdK13fltMV2-GnvM3msGRzw,8885 -httpx/_status_codes.py,sha256=DYn-2ufBgMeXy5s8x3_TB7wjAuAAMewTakPrm5rXEsc,5639 -httpx/_transports/__init__.py,sha256=GbUoBSAOp7z-l-9j5YhMhR3DMIcn6FVLhj072O3Nnno,275 -httpx/_transports/__pycache__/__init__.cpython-312.pyc,, -httpx/_transports/__pycache__/asgi.cpython-312.pyc,, -httpx/_transports/__pycache__/base.cpython-312.pyc,, -httpx/_transports/__pycache__/default.cpython-312.pyc,, -httpx/_transports/__pycache__/mock.cpython-312.pyc,, -httpx/_transports/__pycache__/wsgi.cpython-312.pyc,, -httpx/_transports/asgi.py,sha256=R51xA7vsZvPb4f2g8lGvYCo2vhkcBl0LK_SbwzxQ0_k,5234 -httpx/_transports/base.py,sha256=kZS_VMbViYfF570pogUCJ1bulz-ybfL51Pqs9yktebU,2523 
-httpx/_transports/default.py,sha256=Mxq8Z7uTUOSYx5gXSax8gexeOYF000RAcqpllbUMdAY,13395 -httpx/_transports/mock.py,sha256=PTo0d567RITXxGrki6kN7_67wwAxfwiMDcuXJiZCjEo,1232 -httpx/_transports/wsgi.py,sha256=NcPX3Xap_EwCFZWO_OaSyQNuInCYx1QMNbO8GAei6jY,4825 -httpx/_types.py,sha256=9GNQPQu8uJwXJdthlkMqUhDDSDpUDzLuF1lwaDZhFK0,3451 -httpx/_urlparse.py,sha256=aiYG6lL323vw5iC_OoOUtUNXc1G6wRtn9eBc_G2OcHc,17918 -httpx/_urls.py,sha256=JgYkK4IuC3PviLY-Dnyam_n-MxjT5g_dJibUSWtt_Kc,21808 -httpx/_utils.py,sha256=lByQlK36pmXTJa7WxlWEfB48tcKb9fxI8xEO4ayi1JM,13858 -httpx/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/WHEEL b/myenv/Lib/site-packages/httpx-0.27.2.dist-info/WHEEL deleted file mode 100644 index cdd68a4..0000000 --- a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: hatchling 1.25.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/entry_points.txt b/myenv/Lib/site-packages/httpx-0.27.2.dist-info/entry_points.txt deleted file mode 100644 index 8ae9600..0000000 --- a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/entry_points.txt +++ /dev/null @@ -1,2 +0,0 @@ -[console_scripts] -httpx = httpx:main diff --git a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/licenses/LICENSE.md b/myenv/Lib/site-packages/httpx-0.27.2.dist-info/licenses/LICENSE.md deleted file mode 100644 index ab79d16..0000000 --- a/myenv/Lib/site-packages/httpx-0.27.2.dist-info/licenses/LICENSE.md +++ /dev/null @@ -1,12 +0,0 @@ -Copyright © 2019, [Encode OSS Ltd](https://www.encode.io/). -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - -* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - -* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. - -* Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/myenv/Lib/site-packages/httpx/__init__.py b/myenv/Lib/site-packages/httpx/__init__.py deleted file mode 100644 index e9addde..0000000 --- a/myenv/Lib/site-packages/httpx/__init__.py +++ /dev/null @@ -1,105 +0,0 @@ -from .__version__ import __description__, __title__, __version__ -from ._api import * -from ._auth import * -from ._client import * -from ._config import * -from ._content import * -from ._exceptions import * -from ._models import * -from ._status_codes import * -from ._transports import * -from ._types import * -from ._urls import * - -try: - from ._main import main -except ImportError: # pragma: no cover - - def main() -> None: # type: ignore - import sys - - print( - "The httpx command line client could not run because the required " - "dependencies were not installed.\nMake sure you've installed " - "everything with: pip install 'httpx[cli]'" - ) - sys.exit(1) - - -__all__ = [ - "__description__", - "__title__", - "__version__", - "ASGITransport", - "AsyncBaseTransport", - "AsyncByteStream", - "AsyncClient", - "AsyncHTTPTransport", - "Auth", - "BaseTransport", - "BasicAuth", - "ByteStream", - "Client", - "CloseError", - "codes", - "ConnectError", - "ConnectTimeout", - "CookieConflict", - "Cookies", - "create_ssl_context", - "DecodingError", - "delete", - "DigestAuth", - "get", - "head", - "Headers", - "HTTPError", - "HTTPStatusError", - "HTTPTransport", - "InvalidURL", - "Limits", - "LocalProtocolError", - "main", - "MockTransport", - "NetRCAuth", - "NetworkError", - "options", - "patch", - "PoolTimeout", - "post", - "ProtocolError", - "Proxy", - "ProxyError", - "put", - "QueryParams", - "ReadError", - "ReadTimeout", - "RemoteProtocolError", - "request", - "Request", - "RequestError", - "RequestNotRead", - "Response", - "ResponseNotRead", - "stream", - "StreamClosed", - "StreamConsumed", - "StreamError", - "SyncByteStream", - "Timeout", - "TimeoutException", - "TooManyRedirects", - "TransportError", - "UnsupportedProtocol", - "URL", - "USE_CLIENT_DEFAULT", - "WriteError", - "WriteTimeout", - "WSGITransport", -] - - -__locals = locals() -for __name in __all__: - if not __name.startswith("__"): - setattr(__locals[__name], "__module__", "httpx") # noqa diff --git a/myenv/Lib/site-packages/httpx/__version__.py b/myenv/Lib/site-packages/httpx/__version__.py deleted file mode 100644 index 5eaaddb..0000000 --- a/myenv/Lib/site-packages/httpx/__version__.py +++ /dev/null @@ -1,3 +0,0 @@ -__title__ = "httpx" -__description__ = "A next generation HTTP client, for Python 3." 
-__version__ = "0.27.2" diff --git a/myenv/Lib/site-packages/httpx/_api.py b/myenv/Lib/site-packages/httpx/_api.py deleted file mode 100644 index 4e98b60..0000000 --- a/myenv/Lib/site-packages/httpx/_api.py +++ /dev/null @@ -1,479 +0,0 @@ -from __future__ import annotations - -import typing -from contextlib import contextmanager - -from ._client import Client -from ._config import DEFAULT_TIMEOUT_CONFIG -from ._models import Response -from ._types import ( - AuthTypes, - CertTypes, - CookieTypes, - HeaderTypes, - ProxiesTypes, - ProxyTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestFiles, - TimeoutTypes, - VerifyTypes, -) -from ._urls import URL - -__all__ = [ - "delete", - "get", - "head", - "options", - "patch", - "post", - "put", - "request", - "stream", -] - - -def request( - method: str, - url: URL | str, - *, - params: QueryParamTypes | None = None, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - trust_env: bool = True, -) -> Response: - """ - Sends an HTTP request. - - **Parameters:** - - * **method** - HTTP method for the new `Request` object: `GET`, `OPTIONS`, - `HEAD`, `POST`, `PUT`, `PATCH`, or `DELETE`. - * **url** - URL for the new `Request` object. - * **params** - *(optional)* Query parameters to include in the URL, as a - string, dictionary, or sequence of two-tuples. - * **content** - *(optional)* Binary content to include in the body of the - request, as bytes or a byte iterator. - * **data** - *(optional)* Form data to include in the body of the request, - as a dictionary. - * **files** - *(optional)* A dictionary of upload files to include in the - body of the request. - * **json** - *(optional)* A JSON serializable object to include in the body - of the request. - * **headers** - *(optional)* Dictionary of HTTP headers to include in the - request. - * **cookies** - *(optional)* Dictionary of Cookie items to include in the - request. - * **auth** - *(optional)* An authentication class to use when sending the - request. - * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. - * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy URLs. - * **timeout** - *(optional)* The timeout configuration to use when sending - the request. - * **follow_redirects** - *(optional)* Enables or disables HTTP redirects. - * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to - verify the identity of requested hosts. Either `True` (default CA bundle), - a path to an SSL certificate file, an `ssl.SSLContext`, or `False` - (which will disable verification). - * **cert** - *(optional)* An SSL certificate used by the requested host - to authenticate the client. Either a path to an SSL certificate file, or - two-tuple of (certificate file, key file), or a three-tuple of (certificate - file, key file, password). - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. 
- - **Returns:** `Response` - - Usage: - - ``` - >>> import httpx - >>> response = httpx.request('GET', 'https://httpbin.org/get') - >>> response - - ``` - """ - with Client( - cookies=cookies, - proxy=proxy, - proxies=proxies, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) as client: - return client.request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - auth=auth, - follow_redirects=follow_redirects, - ) - - -@contextmanager -def stream( - method: str, - url: URL | str, - *, - params: QueryParamTypes | None = None, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - trust_env: bool = True, -) -> typing.Iterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - with Client( - cookies=cookies, - proxy=proxy, - proxies=proxies, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) as client: - with client.stream( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - auth=auth, - follow_redirects=follow_redirects, - ) as response: - yield response - - -def get( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - follow_redirects: bool = False, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `GET` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `GET` requests should not include a request body. - """ - return request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def options( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - follow_redirects: bool = False, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `OPTIONS` requests should not include a request body. 
- """ - return request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def head( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - follow_redirects: bool = False, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `HEAD` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `HEAD` requests should not include a request body. - """ - return request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def post( - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - follow_redirects: bool = False, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `POST` request. - - **Parameters**: See `httpx.request`. - """ - return request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def put( - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - follow_redirects: bool = False, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `PUT` request. - - **Parameters**: See `httpx.request`. 
- """ - return request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def patch( - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - follow_redirects: bool = False, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `PATCH` request. - - **Parameters**: See `httpx.request`. - """ - return request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) - - -def delete( - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | None = None, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - follow_redirects: bool = False, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - trust_env: bool = True, -) -> Response: - """ - Sends a `DELETE` request. - - **Parameters**: See `httpx.request`. - - Note that the `data`, `files`, `json` and `content` parameters are not available - on this function, as `DELETE` requests should not include a request body. - """ - return request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - proxy=proxy, - proxies=proxies, - follow_redirects=follow_redirects, - cert=cert, - verify=verify, - timeout=timeout, - trust_env=trust_env, - ) diff --git a/myenv/Lib/site-packages/httpx/_auth.py b/myenv/Lib/site-packages/httpx/_auth.py deleted file mode 100644 index b03971a..0000000 --- a/myenv/Lib/site-packages/httpx/_auth.py +++ /dev/null @@ -1,348 +0,0 @@ -from __future__ import annotations - -import hashlib -import os -import re -import time -import typing -from base64 import b64encode -from urllib.request import parse_http_list - -from ._exceptions import ProtocolError -from ._models import Cookies, Request, Response -from ._utils import to_bytes, to_str, unquote - -if typing.TYPE_CHECKING: # pragma: no cover - from hashlib import _Hash - - -__all__ = ["Auth", "BasicAuth", "DigestAuth", "NetRCAuth"] - - -class Auth: - """ - Base class for all authentication schemes. - - To implement a custom authentication scheme, subclass `Auth` and override - the `.auth_flow()` method. - - If the authentication scheme does I/O such as disk access or network calls, or uses - synchronization primitives such as locks, you should override `.sync_auth_flow()` - and/or `.async_auth_flow()` instead of `.auth_flow()` to provide specialized - implementations that will be used by `Client` and `AsyncClient` respectively. 
- """ - - requires_request_body = False - requires_response_body = False - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - """ - Execute the authentication flow. - - To dispatch a request, `yield` it: - - ``` - yield request - ``` - - The client will `.send()` the response back into the flow generator. You can - access it like so: - - ``` - response = yield request - ``` - - A `return` (or reaching the end of the generator) will result in the - client returning the last response obtained from the server. - - You can dispatch as many requests as is necessary. - """ - yield request - - def sync_auth_flow( - self, request: Request - ) -> typing.Generator[Request, Response, None]: - """ - Execute the authentication flow synchronously. - - By default, this defers to `.auth_flow()`. You should override this method - when the authentication scheme does I/O and/or uses concurrency primitives. - """ - if self.requires_request_body: - request.read() - - flow = self.auth_flow(request) - request = next(flow) - - while True: - response = yield request - if self.requires_response_body: - response.read() - - try: - request = flow.send(response) - except StopIteration: - break - - async def async_auth_flow( - self, request: Request - ) -> typing.AsyncGenerator[Request, Response]: - """ - Execute the authentication flow asynchronously. - - By default, this defers to `.auth_flow()`. You should override this method - when the authentication scheme does I/O and/or uses concurrency primitives. - """ - if self.requires_request_body: - await request.aread() - - flow = self.auth_flow(request) - request = next(flow) - - while True: - response = yield request - if self.requires_response_body: - await response.aread() - - try: - request = flow.send(response) - except StopIteration: - break - - -class FunctionAuth(Auth): - """ - Allows the 'auth' argument to be passed as a simple callable function, - that takes the request, and returns a new, modified request. - """ - - def __init__(self, func: typing.Callable[[Request], Request]) -> None: - self._func = func - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - yield self._func(request) - - -class BasicAuth(Auth): - """ - Allows the 'auth' argument to be passed as a (username, password) pair, - and uses HTTP Basic authentication. - """ - - def __init__(self, username: str | bytes, password: str | bytes) -> None: - self._auth_header = self._build_auth_header(username, password) - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - request.headers["Authorization"] = self._auth_header - yield request - - def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: - userpass = b":".join((to_bytes(username), to_bytes(password))) - token = b64encode(userpass).decode() - return f"Basic {token}" - - -class NetRCAuth(Auth): - """ - Use a 'netrc' file to lookup basic auth credentials based on the url host. - """ - - def __init__(self, file: str | None = None) -> None: - # Lazily import 'netrc'. - # There's no need for us to load this module unless 'NetRCAuth' is being used. - import netrc - - self._netrc_info = netrc.netrc(file) - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - auth_info = self._netrc_info.authenticators(request.url.host) - if auth_info is None or not auth_info[2]: - # The netrc file did not have authentication credentials for this host. 
- yield request - else: - # Build a basic auth header with credentials from the netrc file. - request.headers["Authorization"] = self._build_auth_header( - username=auth_info[0], password=auth_info[2] - ) - yield request - - def _build_auth_header(self, username: str | bytes, password: str | bytes) -> str: - userpass = b":".join((to_bytes(username), to_bytes(password))) - token = b64encode(userpass).decode() - return f"Basic {token}" - - -class DigestAuth(Auth): - _ALGORITHM_TO_HASH_FUNCTION: dict[str, typing.Callable[[bytes], _Hash]] = { - "MD5": hashlib.md5, - "MD5-SESS": hashlib.md5, - "SHA": hashlib.sha1, - "SHA-SESS": hashlib.sha1, - "SHA-256": hashlib.sha256, - "SHA-256-SESS": hashlib.sha256, - "SHA-512": hashlib.sha512, - "SHA-512-SESS": hashlib.sha512, - } - - def __init__(self, username: str | bytes, password: str | bytes) -> None: - self._username = to_bytes(username) - self._password = to_bytes(password) - self._last_challenge: _DigestAuthChallenge | None = None - self._nonce_count = 1 - - def auth_flow(self, request: Request) -> typing.Generator[Request, Response, None]: - if self._last_challenge: - request.headers["Authorization"] = self._build_auth_header( - request, self._last_challenge - ) - - response = yield request - - if response.status_code != 401 or "www-authenticate" not in response.headers: - # If the response is not a 401 then we don't - # need to build an authenticated request. - return - - for auth_header in response.headers.get_list("www-authenticate"): - if auth_header.lower().startswith("digest "): - break - else: - # If the response does not include a 'WWW-Authenticate: Digest ...' - # header, then we don't need to build an authenticated request. - return - - self._last_challenge = self._parse_challenge(request, response, auth_header) - self._nonce_count = 1 - - request.headers["Authorization"] = self._build_auth_header( - request, self._last_challenge - ) - if response.cookies: - Cookies(response.cookies).set_cookie_header(request=request) - yield request - - def _parse_challenge( - self, request: Request, response: Response, auth_header: str - ) -> _DigestAuthChallenge: - """ - Returns a challenge from a Digest WWW-Authenticate header. - These take the form of: - `Digest realm="realm@host.com",qop="auth,auth-int",nonce="abc",opaque="xyz"` - """ - scheme, _, fields = auth_header.partition(" ") - - # This method should only ever have been called with a Digest auth header. 
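`DigestAuth` drives that same generator flow automatically: the first request goes out unauthenticated, and a 401 carrying a `WWW-Authenticate: Digest ...` challenge triggers an authenticated retry. A usage sketch, with httpbin.org standing in as an example server:

```python
import httpx

auth = httpx.DigestAuth(username="user", password="pass")
# The first request receives a 401 challenge; the retry carries the
# computed Digest Authorization header.
response = httpx.get("https://httpbin.org/digest-auth/auth/user/pass", auth=auth)
print(response.status_code)
```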
- assert scheme.lower() == "digest" - - header_dict: dict[str, str] = {} - for field in parse_http_list(fields): - key, value = field.strip().split("=", 1) - header_dict[key] = unquote(value) - - try: - realm = header_dict["realm"].encode() - nonce = header_dict["nonce"].encode() - algorithm = header_dict.get("algorithm", "MD5") - opaque = header_dict["opaque"].encode() if "opaque" in header_dict else None - qop = header_dict["qop"].encode() if "qop" in header_dict else None - return _DigestAuthChallenge( - realm=realm, nonce=nonce, algorithm=algorithm, opaque=opaque, qop=qop - ) - except KeyError as exc: - message = "Malformed Digest WWW-Authenticate header" - raise ProtocolError(message, request=request) from exc - - def _build_auth_header( - self, request: Request, challenge: _DigestAuthChallenge - ) -> str: - hash_func = self._ALGORITHM_TO_HASH_FUNCTION[challenge.algorithm.upper()] - - def digest(data: bytes) -> bytes: - return hash_func(data).hexdigest().encode() - - A1 = b":".join((self._username, challenge.realm, self._password)) - - path = request.url.raw_path - A2 = b":".join((request.method.encode(), path)) - # TODO: implement auth-int - HA2 = digest(A2) - - nc_value = b"%08x" % self._nonce_count - cnonce = self._get_client_nonce(self._nonce_count, challenge.nonce) - self._nonce_count += 1 - - HA1 = digest(A1) - if challenge.algorithm.lower().endswith("-sess"): - HA1 = digest(b":".join((HA1, challenge.nonce, cnonce))) - - qop = self._resolve_qop(challenge.qop, request=request) - if qop is None: - # Following RFC 2069 - digest_data = [HA1, challenge.nonce, HA2] - else: - # Following RFC 2617/7616 - digest_data = [HA1, challenge.nonce, nc_value, cnonce, qop, HA2] - - format_args = { - "username": self._username, - "realm": challenge.realm, - "nonce": challenge.nonce, - "uri": path, - "response": digest(b":".join(digest_data)), - "algorithm": challenge.algorithm.encode(), - } - if challenge.opaque: - format_args["opaque"] = challenge.opaque - if qop: - format_args["qop"] = b"auth" - format_args["nc"] = nc_value - format_args["cnonce"] = cnonce - - return "Digest " + self._get_header_value(format_args) - - def _get_client_nonce(self, nonce_count: int, nonce: bytes) -> bytes: - s = str(nonce_count).encode() - s += nonce - s += time.ctime().encode() - s += os.urandom(8) - - return hashlib.sha1(s).hexdigest()[:16].encode() - - def _get_header_value(self, header_fields: dict[str, bytes]) -> str: - NON_QUOTED_FIELDS = ("algorithm", "qop", "nc") - QUOTED_TEMPLATE = '{}="{}"' - NON_QUOTED_TEMPLATE = "{}={}" - - header_value = "" - for i, (field, value) in enumerate(header_fields.items()): - if i > 0: - header_value += ", " - template = ( - QUOTED_TEMPLATE - if field not in NON_QUOTED_FIELDS - else NON_QUOTED_TEMPLATE - ) - header_value += template.format(field, to_str(value)) - - return header_value - - def _resolve_qop(self, qop: bytes | None, request: Request) -> bytes | None: - if qop is None: - return None - qops = re.split(b", ?", qop) - if b"auth" in qops: - return b"auth" - - if qops == [b"auth-int"]: - raise NotImplementedError("Digest auth-int support is not yet implemented") - - message = f'Unexpected qop value "{qop!r}" in digest auth' - raise ProtocolError(message, request=request) - - -class _DigestAuthChallenge(typing.NamedTuple): - realm: bytes - nonce: bytes - algorithm: str - opaque: bytes | None - qop: bytes | None diff --git a/myenv/Lib/site-packages/httpx/_client.py b/myenv/Lib/site-packages/httpx/_client.py deleted file mode 100644 index 26610f6..0000000 --- 
a/myenv/Lib/site-packages/httpx/_client.py +++ /dev/null @@ -1,2065 +0,0 @@ -from __future__ import annotations - -import datetime -import enum -import logging -import typing -import warnings -from contextlib import asynccontextmanager, contextmanager -from types import TracebackType - -from .__version__ import __version__ -from ._auth import Auth, BasicAuth, FunctionAuth -from ._config import ( - DEFAULT_LIMITS, - DEFAULT_MAX_REDIRECTS, - DEFAULT_TIMEOUT_CONFIG, - Limits, - Proxy, - Timeout, -) -from ._decoders import SUPPORTED_DECODERS -from ._exceptions import ( - InvalidURL, - RemoteProtocolError, - TooManyRedirects, - request_context, -) -from ._models import Cookies, Headers, Request, Response -from ._status_codes import codes -from ._transports.asgi import ASGITransport -from ._transports.base import AsyncBaseTransport, BaseTransport -from ._transports.default import AsyncHTTPTransport, HTTPTransport -from ._transports.wsgi import WSGITransport -from ._types import ( - AsyncByteStream, - AuthTypes, - CertTypes, - CookieTypes, - HeaderTypes, - ProxiesTypes, - ProxyTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestExtensions, - RequestFiles, - SyncByteStream, - TimeoutTypes, - VerifyTypes, -) -from ._urls import URL, QueryParams -from ._utils import ( - Timer, - URLPattern, - get_environment_proxies, - is_https_redirect, - same_origin, -) - -__all__ = ["USE_CLIENT_DEFAULT", "AsyncClient", "Client"] - -# The type annotation for @classmethod and context managers here follows PEP 484 -# https://www.python.org/dev/peps/pep-0484/#annotating-instance-and-class-methods -T = typing.TypeVar("T", bound="Client") -U = typing.TypeVar("U", bound="AsyncClient") - - -class UseClientDefault: - """ - For some parameters such as `auth=...` and `timeout=...` we need to be able - to indicate the default "unset" state, in a way that is distinctly different - to using `None`. - - The default "unset" state indicates that whatever default is set on the - client should be used. This is different to setting `None`, which - explicitly disables the parameter, possibly overriding a client default. - - For example we use `timeout=USE_CLIENT_DEFAULT` in the `request()` signature. - Omitting the `timeout` parameter will send a request using whatever default - timeout has been configured on the client. Including `timeout=None` will - ensure no timeout is used. - - Note that user code shouldn't need to use the `USE_CLIENT_DEFAULT` constant, - but it is used internally when a parameter is not included. - """ - - -USE_CLIENT_DEFAULT = UseClientDefault() - - -logger = logging.getLogger("httpx") - -USER_AGENT = f"python-httpx/{__version__}" -ACCEPT_ENCODING = ", ".join( - [key for key in SUPPORTED_DECODERS.keys() if key != "identity"] -) - - -class ClientState(enum.Enum): - # UNOPENED: - # The client has been instantiated, but has not been used to send a request, - # or been opened by entering the context of a `with` block. - UNOPENED = 1 - # OPENED: - # The client has either sent a request, or is within a `with` block. - OPENED = 2 - # CLOSED: - # The client has either exited the `with` block, or `close()` has - # been called explicitly. - CLOSED = 3 - - -class BoundSyncStream(SyncByteStream): - """ - A byte stream that is bound to a given response instance, and that - ensures the `response.elapsed` is set once the response is closed. 
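The `USE_CLIENT_DEFAULT` sentinel described above distinguishes "argument not passed" from an explicit `None`; a sketch of the difference from the caller's side:

```python
import httpx

client = httpx.Client(timeout=httpx.Timeout(5.0))

# No timeout argument: the client-level default (5.0s) applies,
# via the USE_CLIENT_DEFAULT sentinel internally.
client.get("https://example.org")

# Explicit None: disables the timeout entirely, overriding the default.
client.get("https://example.org", timeout=None)
```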
- """ - - def __init__( - self, stream: SyncByteStream, response: Response, timer: Timer - ) -> None: - self._stream = stream - self._response = response - self._timer = timer - - def __iter__(self) -> typing.Iterator[bytes]: - for chunk in self._stream: - yield chunk - - def close(self) -> None: - seconds = self._timer.sync_elapsed() - self._response.elapsed = datetime.timedelta(seconds=seconds) - self._stream.close() - - -class BoundAsyncStream(AsyncByteStream): - """ - An async byte stream that is bound to a given response instance, and that - ensures the `response.elapsed` is set once the response is closed. - """ - - def __init__( - self, stream: AsyncByteStream, response: Response, timer: Timer - ) -> None: - self._stream = stream - self._response = response - self._timer = timer - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - async for chunk in self._stream: - yield chunk - - async def aclose(self) -> None: - seconds = await self._timer.async_elapsed() - self._response.elapsed = datetime.timedelta(seconds=seconds) - await self._stream.aclose() - - -EventHook = typing.Callable[..., typing.Any] - - -class BaseClient: - def __init__( - self, - *, - auth: AuthTypes | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, - base_url: URL | str = "", - trust_env: bool = True, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - event_hooks = {} if event_hooks is None else event_hooks - - self._base_url = self._enforce_trailing_slash(URL(base_url)) - - self._auth = self._build_auth(auth) - self._params = QueryParams(params) - self.headers = Headers(headers) - self._cookies = Cookies(cookies) - self._timeout = Timeout(timeout) - self.follow_redirects = follow_redirects - self.max_redirects = max_redirects - self._event_hooks = { - "request": list(event_hooks.get("request", [])), - "response": list(event_hooks.get("response", [])), - } - self._trust_env = trust_env - self._default_encoding = default_encoding - self._state = ClientState.UNOPENED - - @property - def is_closed(self) -> bool: - """ - Check if the client being closed - """ - return self._state == ClientState.CLOSED - - @property - def trust_env(self) -> bool: - return self._trust_env - - def _enforce_trailing_slash(self, url: URL) -> URL: - if url.raw_path.endswith(b"/"): - return url - return url.copy_with(raw_path=url.raw_path + b"/") - - def _get_proxy_map( - self, proxies: ProxiesTypes | None, allow_env_proxies: bool - ) -> dict[str, Proxy | None]: - if proxies is None: - if allow_env_proxies: - return { - key: None if url is None else Proxy(url=url) - for key, url in get_environment_proxies().items() - } - return {} - if isinstance(proxies, dict): - new_proxies = {} - for key, value in proxies.items(): - proxy = Proxy(url=value) if isinstance(value, (str, URL)) else value - new_proxies[str(key)] = proxy - return new_proxies - else: - proxy = Proxy(url=proxies) if isinstance(proxies, (str, URL)) else proxies - return {"all://": proxy} - - @property - def timeout(self) -> Timeout: - return self._timeout - - @timeout.setter - def timeout(self, timeout: TimeoutTypes) -> None: - self._timeout = Timeout(timeout) - - @property - def event_hooks(self) -> dict[str, list[EventHook]]: - return self._event_hooks - - 
@event_hooks.setter - def event_hooks(self, event_hooks: dict[str, list[EventHook]]) -> None: - self._event_hooks = { - "request": list(event_hooks.get("request", [])), - "response": list(event_hooks.get("response", [])), - } - - @property - def auth(self) -> Auth | None: - """ - Authentication class used when none is passed at the request-level. - - See also [Authentication][0]. - - [0]: /quickstart/#authentication - """ - return self._auth - - @auth.setter - def auth(self, auth: AuthTypes) -> None: - self._auth = self._build_auth(auth) - - @property - def base_url(self) -> URL: - """ - Base URL to use when sending requests with relative URLs. - """ - return self._base_url - - @base_url.setter - def base_url(self, url: URL | str) -> None: - self._base_url = self._enforce_trailing_slash(URL(url)) - - @property - def headers(self) -> Headers: - """ - HTTP headers to include when sending requests. - """ - return self._headers - - @headers.setter - def headers(self, headers: HeaderTypes) -> None: - client_headers = Headers( - { - b"Accept": b"*/*", - b"Accept-Encoding": ACCEPT_ENCODING.encode("ascii"), - b"Connection": b"keep-alive", - b"User-Agent": USER_AGENT.encode("ascii"), - } - ) - client_headers.update(headers) - self._headers = client_headers - - @property - def cookies(self) -> Cookies: - """ - Cookie values to include when sending requests. - """ - return self._cookies - - @cookies.setter - def cookies(self, cookies: CookieTypes) -> None: - self._cookies = Cookies(cookies) - - @property - def params(self) -> QueryParams: - """ - Query parameters to include in the URL when sending requests. - """ - return self._params - - @params.setter - def params(self, params: QueryParamTypes) -> None: - self._params = QueryParams(params) - - def build_request( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Request: - """ - Build and return a request instance. - - * The `params`, `headers` and `cookies` arguments - are merged with any values set on the client. - * The `url` argument is merged with any `base_url` set on the client. - - See also: [Request instances][0] - - [0]: /advanced/clients/#request-instances - """ - url = self._merge_url(url) - headers = self._merge_headers(headers) - cookies = self._merge_cookies(cookies) - params = self._merge_queryparams(params) - extensions = {} if extensions is None else extensions - if "timeout" not in extensions: - timeout = ( - self.timeout - if isinstance(timeout, UseClientDefault) - else Timeout(timeout) - ) - extensions = dict(**extensions, timeout=timeout.as_dict()) - return Request( - method, - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - extensions=extensions, - ) - - def _merge_url(self, url: URL | str) -> URL: - """ - Merge a URL argument together with any 'base_url' on the client, - to create the URL used for the outgoing request. - """ - merge_url = URL(url) - if merge_url.is_relative_url: - # To merge URLs we always append to the base URL. To get this - # behaviour correct we always ensure the base URL ends in a '/' - # separator, and strip any leading '/' from the merge URL. 
- # - # So, eg... - # - # >>> client = Client(base_url="https://www.example.com/subpath") - # >>> client.base_url - # URL('https://www.example.com/subpath/') - # >>> client.build_request("GET", "/path").url - # URL('https://www.example.com/subpath/path') - merge_raw_path = self.base_url.raw_path + merge_url.raw_path.lstrip(b"/") - return self.base_url.copy_with(raw_path=merge_raw_path) - return merge_url - - def _merge_cookies(self, cookies: CookieTypes | None = None) -> CookieTypes | None: - """ - Merge a cookies argument together with any cookies on the client, - to create the cookies used for the outgoing request. - """ - if cookies or self.cookies: - merged_cookies = Cookies(self.cookies) - merged_cookies.update(cookies) - return merged_cookies - return cookies - - def _merge_headers(self, headers: HeaderTypes | None = None) -> HeaderTypes | None: - """ - Merge a headers argument together with any headers on the client, - to create the headers used for the outgoing request. - """ - merged_headers = Headers(self.headers) - merged_headers.update(headers) - return merged_headers - - def _merge_queryparams( - self, params: QueryParamTypes | None = None - ) -> QueryParamTypes | None: - """ - Merge a queryparams argument together with any queryparams on the client, - to create the queryparams used for the outgoing request. - """ - if params or self.params: - merged_queryparams = QueryParams(self.params) - return merged_queryparams.merge(params) - return params - - def _build_auth(self, auth: AuthTypes | None) -> Auth | None: - if auth is None: - return None - elif isinstance(auth, tuple): - return BasicAuth(username=auth[0], password=auth[1]) - elif isinstance(auth, Auth): - return auth - elif callable(auth): - return FunctionAuth(func=auth) - else: - raise TypeError(f'Invalid "auth" argument: {auth!r}') - - def _build_request_auth( - self, - request: Request, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - ) -> Auth: - auth = ( - self._auth if isinstance(auth, UseClientDefault) else self._build_auth(auth) - ) - - if auth is not None: - return auth - - username, password = request.url.username, request.url.password - if username or password: - return BasicAuth(username=username, password=password) - - return Auth() - - def _build_redirect_request(self, request: Request, response: Response) -> Request: - """ - Given a request and a redirect response, return a new request that - should be used to effect the redirect. - """ - method = self._redirect_method(request, response) - url = self._redirect_url(request, response) - headers = self._redirect_headers(request, url, method) - stream = self._redirect_stream(request, method) - cookies = Cookies(self.cookies) - return Request( - method=method, - url=url, - headers=headers, - cookies=cookies, - stream=stream, - extensions=request.extensions, - ) - - def _redirect_method(self, request: Request, response: Response) -> str: - """ - When being redirected we may want to change the method of the request - based on certain specs or browser behavior. - """ - method = request.method - - # https://tools.ietf.org/html/rfc7231#section-6.4.4 - if response.status_code == codes.SEE_OTHER and method != "HEAD": - method = "GET" - - # Do what the browsers do, despite standards... - # Turn 302s into GETs. - if response.status_code == codes.FOUND and method != "HEAD": - method = "GET" - - # If a POST is responded to with a 301, turn it into a GET. - # This bizarre behaviour is explained in 'requests' issue 1704. 
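The `_merge_url()` behaviour spelled out in those comments can be checked directly; a small sketch echoing the documented example:

```python
import httpx

client = httpx.Client(base_url="https://www.example.com/subpath")
# The base URL gains a trailing "/" and the leading "/" of the
# request path is stripped before joining.
print(client.base_url)                           # https://www.example.com/subpath/
print(client.build_request("GET", "/path").url)  # https://www.example.com/subpath/path
```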
-        if response.status_code == codes.MOVED_PERMANENTLY and method == "POST":
-            method = "GET"
-
-        return method
-
-    def _redirect_url(self, request: Request, response: Response) -> URL:
-        """
-        Return the URL for the redirect to follow.
-        """
-        location = response.headers["Location"]
-
-        try:
-            url = URL(location)
-        except InvalidURL as exc:
-            raise RemoteProtocolError(
-                f"Invalid URL in location header: {exc}.", request=request
-            ) from None
-
-        # Handle malformed 'Location' headers that are in "absolute" form
-        # but have no host.
-        # See: https://github.com/encode/httpx/issues/771
-        if url.scheme and not url.host:
-            url = url.copy_with(host=request.url.host)
-
-        # Facilitate relative 'Location' headers, as allowed by RFC 7231.
-        # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
-        if url.is_relative_url:
-            url = request.url.join(url)
-
-        # Attach previous fragment if needed (RFC 7231 7.1.2)
-        if request.url.fragment and not url.fragment:
-            url = url.copy_with(fragment=request.url.fragment)
-
-        return url
-
-    def _redirect_headers(self, request: Request, url: URL, method: str) -> Headers:
-        """
-        Return the headers that should be used for the redirect request.
-        """
-        headers = Headers(request.headers)
-
-        if not same_origin(url, request.url):
-            if not is_https_redirect(request.url, url):
-                # Strip Authorization headers when responses are redirected
-                # away from the origin. (Except for direct HTTP to HTTPS redirects.)
-                headers.pop("Authorization", None)
-
-            # Update the Host header.
-            headers["Host"] = url.netloc.decode("ascii")
-
-        if method != request.method and method == "GET":
-            # If we've switched to a 'GET' request, then strip any headers which
-            # are only relevant to the request body.
-            headers.pop("Content-Length", None)
-            headers.pop("Transfer-Encoding", None)
-
-        # We should use the client cookie store to determine any cookie header,
-        # rather than whatever was on the original outgoing request.
-        headers.pop("Cookie", None)
-
-        return headers
-
-    def _redirect_stream(
-        self, request: Request, method: str
-    ) -> SyncByteStream | AsyncByteStream | None:
-        """
-        Return the body that should be used for the redirect request.
-        """
-        if method != request.method and method == "GET":
-            return None
-
-        return request.stream
-
-    def _set_timeout(self, request: Request) -> None:
-        if "timeout" not in request.extensions:
-            timeout = (
-                self.timeout
-                if isinstance(self.timeout, UseClientDefault)
-                else Timeout(self.timeout)
-            )
-            request.extensions = dict(**request.extensions, timeout=timeout.as_dict())
-
-
-class Client(BaseClient):
-    """
-    An HTTP client, with connection pooling, HTTP/2, redirects, cookie persistence, etc.
-
-    It can be shared between threads.
-
-    Usage:
-
-    ```python
-    >>> client = httpx.Client()
-    >>> response = client.get('https://example.org')
-    ```
-
-    **Parameters:**
-
-    * **auth** - *(optional)* An authentication class to use when sending
-    requests.
-    * **params** - *(optional)* Query parameters to include in request URLs, as
-    a string, dictionary, or sequence of two-tuples.
-    * **headers** - *(optional)* Dictionary of HTTP headers to include when
-    sending requests.
-    * **cookies** - *(optional)* Dictionary of Cookie items to include when
-    sending requests.
-    * **verify** - *(optional)* SSL certificates (a.k.a. CA bundle) used to
-    verify the identity of requested hosts. Either `True` (default CA bundle),
-    a path to an SSL certificate file, an `ssl.SSLContext`, or `False`
-    (which will disable verification).
- * **cert** - *(optional)* An SSL certificate used by the requested host - to authenticate the client. Either a path to an SSL certificate file, or - two-tuple of (certificate file, key file), or a three-tuple of (certificate - file, key file, password). - * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be - enabled. Defaults to `False`. - * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. - * **proxies** - *(optional)* A dictionary mapping proxy keys to proxy - URLs. - * **timeout** - *(optional)* The timeout configuration to use when sending - requests. - * **limits** - *(optional)* The limits configuration to use. - * **max_redirects** - *(optional)* The maximum number of redirect responses - that should be followed. - * **base_url** - *(optional)* A URL to use as the base when building - request URLs. - * **transport** - *(optional)* A transport class to use for sending requests - over the network. - * **app** - *(optional)* An WSGI application to send requests to, - rather than sending actual network requests. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - * **default_encoding** - *(optional)* The default encoding to use for decoding - response text, if no charset information is included in a response Content-Type - header. Set to a callable for automatic character set detection. Default: "utf-8". - """ - - def __init__( - self, - *, - auth: AuthTypes | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - mounts: None | (typing.Mapping[str, BaseTransport | None]) = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - limits: Limits = DEFAULT_LIMITS, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, - base_url: URL | str = "", - transport: BaseTransport | None = None, - app: typing.Callable[..., typing.Any] | None = None, - trust_env: bool = True, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - super().__init__( - auth=auth, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - follow_redirects=follow_redirects, - max_redirects=max_redirects, - event_hooks=event_hooks, - base_url=base_url, - trust_env=trust_env, - default_encoding=default_encoding, - ) - - if http2: - try: - import h2 # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using http2=True, but the 'h2' package is not installed. " - "Make sure to install httpx using `pip install httpx[http2]`." - ) from None - - if proxies: - message = ( - "The 'proxies' argument is now deprecated." - " Use 'proxy' or 'mounts' instead." - ) - warnings.warn(message, DeprecationWarning) - if proxy: - raise RuntimeError("Use either `proxy` or 'proxies', not both.") - - if app: - message = ( - "The 'app' shortcut is now deprecated." - " Use the explicit style 'transport=WSGITransport(app=...)' instead." 
- ) - warnings.warn(message, DeprecationWarning) - - allow_env_proxies = trust_env and app is None and transport is None - proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) - - self._transport = self._init_transport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - transport=transport, - app=app, - trust_env=trust_env, - ) - self._mounts: dict[URLPattern, BaseTransport | None] = { - URLPattern(key): None - if proxy is None - else self._init_proxy_transport( - proxy, - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - ) - for key, proxy in proxy_map.items() - } - if mounts is not None: - self._mounts.update( - {URLPattern(key): transport for key, transport in mounts.items()} - ) - - self._mounts = dict(sorted(self._mounts.items())) - - def _init_transport( - self, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - transport: BaseTransport | None = None, - app: typing.Callable[..., typing.Any] | None = None, - trust_env: bool = True, - ) -> BaseTransport: - if transport is not None: - return transport - - if app is not None: - return WSGITransport(app=app) - - return HTTPTransport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - ) - - def _init_proxy_transport( - self, - proxy: Proxy, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - trust_env: bool = True, - ) -> BaseTransport: - return HTTPTransport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - proxy=proxy, - ) - - def _transport_for_url(self, url: URL) -> BaseTransport: - """ - Returns the transport instance that should be used for a given URL. - This will either be the standard connection pool, or a proxy. - """ - for pattern, transport in self._mounts.items(): - if pattern.matches(url): - return self._transport if transport is None else transport - - return self._transport - - def request( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Build and send a request. - - Equivalent to: - - ```python - request = client.build_request(...) - response = client.send(request, ...) - ``` - - See `Client.build_request()`, `Client.send()` and - [Merging of configuration][0] for how the various parameters - are merged with client-level configuration. - - [0]: /advanced/clients/#merging-of-configuration - """ - if cookies is not None: - message = ( - "Setting per-request cookies=<...> is being deprecated, because " - "the expected behaviour on cookie persistence is ambiguous. Set " - "cookies directly on the client instance instead." 
- ) - warnings.warn(message, DeprecationWarning) - - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - return self.send(request, auth=auth, follow_redirects=follow_redirects) - - @contextmanager - def stream( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> typing.Iterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - response = self.send( - request=request, - auth=auth, - follow_redirects=follow_redirects, - stream=True, - ) - try: - yield response - finally: - response.close() - - def send( - self, - request: Request, - *, - stream: bool = False, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - ) -> Response: - """ - Send a request. - - The request is sent as-is, unmodified. - - Typically you'll want to build one with `Client.build_request()` - so that any client-level configuration is merged into the request, - but passing an explicit `httpx.Request()` is supported as well. 
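`Client.send()` above accepts a pre-built request; a sketch of the two-step style its docstring describes (the `X-Demo` header is just a placeholder):

```python
import httpx

client = httpx.Client(headers={"X-Demo": "1"})

# build_request() merges client-level configuration (headers, base_url,
# cookies, params) into the request; send() then transmits it as-is.
request = client.build_request("GET", "https://example.org")
response = client.send(request)
print(response.status_code)
```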
- - See also: [Request instances][0] - - [0]: /advanced/clients/#request-instances - """ - if self._state == ClientState.CLOSED: - raise RuntimeError("Cannot send a request, as the client has been closed.") - - self._state = ClientState.OPENED - follow_redirects = ( - self.follow_redirects - if isinstance(follow_redirects, UseClientDefault) - else follow_redirects - ) - - self._set_timeout(request) - - auth = self._build_request_auth(request, auth) - - response = self._send_handling_auth( - request, - auth=auth, - follow_redirects=follow_redirects, - history=[], - ) - try: - if not stream: - response.read() - - return response - - except BaseException as exc: - response.close() - raise exc - - def _send_handling_auth( - self, - request: Request, - auth: Auth, - follow_redirects: bool, - history: list[Response], - ) -> Response: - auth_flow = auth.sync_auth_flow(request) - try: - request = next(auth_flow) - - while True: - response = self._send_handling_redirects( - request, - follow_redirects=follow_redirects, - history=history, - ) - try: - try: - next_request = auth_flow.send(response) - except StopIteration: - return response - - response.history = list(history) - response.read() - request = next_request - history.append(response) - - except BaseException as exc: - response.close() - raise exc - finally: - auth_flow.close() - - def _send_handling_redirects( - self, - request: Request, - follow_redirects: bool, - history: list[Response], - ) -> Response: - while True: - if len(history) > self.max_redirects: - raise TooManyRedirects( - "Exceeded maximum allowed redirects.", request=request - ) - - for hook in self._event_hooks["request"]: - hook(request) - - response = self._send_single_request(request) - try: - for hook in self._event_hooks["response"]: - hook(response) - response.history = list(history) - - if not response.has_redirect_location: - return response - - request = self._build_redirect_request(request, response) - history = history + [response] - - if follow_redirects: - response.read() - else: - response.next_request = request - return response - - except BaseException as exc: - response.close() - raise exc - - def _send_single_request(self, request: Request) -> Response: - """ - Sends a single request, without handling any redirections. - """ - transport = self._transport_for_url(request.url) - timer = Timer() - timer.sync_start() - - if not isinstance(request.stream, SyncByteStream): - raise RuntimeError( - "Attempted to send an async request with a sync Client instance." - ) - - with request_context(request=request): - response = transport.handle_request(request) - - assert isinstance(response.stream, SyncByteStream) - - response.request = request - response.stream = BoundSyncStream( - response.stream, response=response, timer=timer - ) - self.cookies.extract_cookies(response) - response.default_encoding = self._default_encoding - - logger.info( - 'HTTP Request: %s %s "%s %d %s"', - request.method, - request.url, - response.http_version, - response.status_code, - response.reason_phrase, - ) - - return response - - def get( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `GET` request. 
- - **Parameters**: See `httpx.request`. - """ - return self.request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def options( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def head( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `HEAD` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def post( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `POST` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def put( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PUT` request. - - **Parameters**: See `httpx.request`. 
- """ - return self.request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def patch( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PATCH` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def delete( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `DELETE` request. - - **Parameters**: See `httpx.request`. - """ - return self.request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - def close(self) -> None: - """ - Close transport and proxies. - """ - if self._state != ClientState.CLOSED: - self._state = ClientState.CLOSED - - self._transport.close() - for transport in self._mounts.values(): - if transport is not None: - transport.close() - - def __enter__(self: T) -> T: - if self._state != ClientState.UNOPENED: - msg = { - ClientState.OPENED: "Cannot open a client instance more than once.", - ClientState.CLOSED: ( - "Cannot reopen a client instance, once it has been closed." - ), - }[self._state] - raise RuntimeError(msg) - - self._state = ClientState.OPENED - - self._transport.__enter__() - for transport in self._mounts.values(): - if transport is not None: - transport.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - self._state = ClientState.CLOSED - - self._transport.__exit__(exc_type, exc_value, traceback) - for transport in self._mounts.values(): - if transport is not None: - transport.__exit__(exc_type, exc_value, traceback) - - -class AsyncClient(BaseClient): - """ - An asynchronous HTTP client, with connection pooling, HTTP/2, redirects, - cookie persistence, etc. - - It can be shared between tasks. - - Usage: - - ```python - >>> async with httpx.AsyncClient() as client: - >>> response = await client.get('https://example.org') - ``` - - **Parameters:** - - * **auth** - *(optional)* An authentication class to use when sending - requests. - * **params** - *(optional)* Query parameters to include in request URLs, as - a string, dictionary, or sequence of two-tuples. 
- * **headers** - *(optional)* Dictionary of HTTP headers to include when - sending requests. - * **cookies** - *(optional)* Dictionary of Cookie items to include when - sending requests. - * **verify** - *(optional)* SSL certificates (a.k.a CA bundle) used to - verify the identity of requested hosts. Either `True` (default CA bundle), - a path to an SSL certificate file, an `ssl.SSLContext`, or `False` - (which will disable verification). - * **cert** - *(optional)* An SSL certificate used by the requested host - to authenticate the client. Either a path to an SSL certificate file, or - two-tuple of (certificate file, key file), or a three-tuple of (certificate - file, key file, password). - * **http2** - *(optional)* A boolean indicating if HTTP/2 support should be - enabled. Defaults to `False`. - * **proxy** - *(optional)* A proxy URL where all the traffic should be routed. - * **proxies** - *(optional)* A dictionary mapping HTTP protocols to proxy - URLs. - * **timeout** - *(optional)* The timeout configuration to use when sending - requests. - * **limits** - *(optional)* The limits configuration to use. - * **max_redirects** - *(optional)* The maximum number of redirect responses - that should be followed. - * **base_url** - *(optional)* A URL to use as the base when building - request URLs. - * **transport** - *(optional)* A transport class to use for sending requests - over the network. - * **app** - *(optional)* An ASGI application to send requests to, - rather than sending actual network requests. - * **trust_env** - *(optional)* Enables or disables usage of environment - variables for configuration. - * **default_encoding** - *(optional)* The default encoding to use for decoding - response text, if no charset information is included in a response Content-Type - header. Set to a callable for automatic character set detection. Default: "utf-8". - """ - - def __init__( - self, - *, - auth: AuthTypes | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - proxy: ProxyTypes | None = None, - proxies: ProxiesTypes | None = None, - mounts: None | (typing.Mapping[str, AsyncBaseTransport | None]) = None, - timeout: TimeoutTypes = DEFAULT_TIMEOUT_CONFIG, - follow_redirects: bool = False, - limits: Limits = DEFAULT_LIMITS, - max_redirects: int = DEFAULT_MAX_REDIRECTS, - event_hooks: None | (typing.Mapping[str, list[EventHook]]) = None, - base_url: URL | str = "", - transport: AsyncBaseTransport | None = None, - app: typing.Callable[..., typing.Any] | None = None, - trust_env: bool = True, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - super().__init__( - auth=auth, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - follow_redirects=follow_redirects, - max_redirects=max_redirects, - event_hooks=event_hooks, - base_url=base_url, - trust_env=trust_env, - default_encoding=default_encoding, - ) - - if http2: - try: - import h2 # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using http2=True, but the 'h2' package is not installed. " - "Make sure to install httpx using `pip install httpx[http2]`." - ) from None - - if proxies: - message = ( - "The 'proxies' argument is now deprecated." - " Use 'proxy' or 'mounts' instead." 
- ) - warnings.warn(message, DeprecationWarning) - if proxy: - raise RuntimeError("Use either `proxy` or 'proxies', not both.") - - if app: - message = ( - "The 'app' shortcut is now deprecated." - " Use the explicit style 'transport=ASGITransport(app=...)' instead." - ) - warnings.warn(message, DeprecationWarning) - - allow_env_proxies = trust_env and app is None and transport is None - proxy_map = self._get_proxy_map(proxies or proxy, allow_env_proxies) - - self._transport = self._init_transport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - transport=transport, - app=app, - trust_env=trust_env, - ) - - self._mounts: dict[URLPattern, AsyncBaseTransport | None] = { - URLPattern(key): None - if proxy is None - else self._init_proxy_transport( - proxy, - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - ) - for key, proxy in proxy_map.items() - } - if mounts is not None: - self._mounts.update( - {URLPattern(key): transport for key, transport in mounts.items()} - ) - self._mounts = dict(sorted(self._mounts.items())) - - def _init_transport( - self, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - transport: AsyncBaseTransport | None = None, - app: typing.Callable[..., typing.Any] | None = None, - trust_env: bool = True, - ) -> AsyncBaseTransport: - if transport is not None: - return transport - - if app is not None: - return ASGITransport(app=app) - - return AsyncHTTPTransport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - ) - - def _init_proxy_transport( - self, - proxy: Proxy, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - trust_env: bool = True, - ) -> AsyncBaseTransport: - return AsyncHTTPTransport( - verify=verify, - cert=cert, - http1=http1, - http2=http2, - limits=limits, - trust_env=trust_env, - proxy=proxy, - ) - - def _transport_for_url(self, url: URL) -> AsyncBaseTransport: - """ - Returns the transport instance that should be used for a given URL. - This will either be the standard connection pool, or a proxy. - """ - for pattern, transport in self._mounts.items(): - if pattern.matches(url): - return self._transport if transport is None else transport - - return self._transport - - async def request( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Build and send a request. - - Equivalent to: - - ```python - request = client.build_request(...) - response = await client.send(request, ...) - ``` - - See `AsyncClient.build_request()`, `AsyncClient.send()` - and [Merging of configuration][0] for how the various parameters - are merged with client-level configuration. 
- - [0]: /advanced/clients/#merging-of-configuration - """ - - if cookies is not None: # pragma: no cover - message = ( - "Setting per-request cookies=<...> is being deprecated, because " - "the expected behaviour on cookie persistence is ambiguous. Set " - "cookies directly on the client instance instead." - ) - warnings.warn(message, DeprecationWarning) - - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - return await self.send(request, auth=auth, follow_redirects=follow_redirects) - - @asynccontextmanager - async def stream( - self, - method: str, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> typing.AsyncIterator[Response]: - """ - Alternative to `httpx.request()` that streams the response body - instead of loading it into memory at once. - - **Parameters**: See `httpx.request`. - - See also: [Streaming Responses][0] - - [0]: /quickstart#streaming-responses - """ - request = self.build_request( - method=method, - url=url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - timeout=timeout, - extensions=extensions, - ) - response = await self.send( - request=request, - auth=auth, - follow_redirects=follow_redirects, - stream=True, - ) - try: - yield response - finally: - await response.aclose() - - async def send( - self, - request: Request, - *, - stream: bool = False, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - ) -> Response: - """ - Send a request. - - The request is sent as-is, unmodified. - - Typically you'll want to build one with `AsyncClient.build_request()` - so that any client-level configuration is merged into the request, - but passing an explicit `httpx.Request()` is supported as well. 
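A brief usage sketch of the asynchronous variants defined above, mirroring the `AsyncClient` docstring:

```python
import asyncio
import httpx

async def main() -> None:
    async with httpx.AsyncClient() as client:
        response = await client.get("https://example.org")
        print(response.status_code)

        # stream() hands back the response before the body is read.
        async with client.stream("GET", "https://example.org") as stream_response:
            async for chunk in stream_response.aiter_bytes():
                print(len(chunk))

asyncio.run(main())
```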
-
- See also: [Request instances][0]
-
- [0]: /advanced/clients/#request-instances
- """
- if self._state == ClientState.CLOSED:
- raise RuntimeError("Cannot send a request, as the client has been closed.")
-
- self._state = ClientState.OPENED
- follow_redirects = (
- self.follow_redirects
- if isinstance(follow_redirects, UseClientDefault)
- else follow_redirects
- )
-
- self._set_timeout(request)
-
- auth = self._build_request_auth(request, auth)
-
- response = await self._send_handling_auth(
- request,
- auth=auth,
- follow_redirects=follow_redirects,
- history=[],
- )
- try:
- if not stream:
- await response.aread()
-
- return response
-
- except BaseException as exc:
- await response.aclose()
- raise exc
-
- async def _send_handling_auth(
- self,
- request: Request,
- auth: Auth,
- follow_redirects: bool,
- history: list[Response],
- ) -> Response:
- auth_flow = auth.async_auth_flow(request)
- try:
- request = await auth_flow.__anext__()
-
- while True:
- response = await self._send_handling_redirects(
- request,
- follow_redirects=follow_redirects,
- history=history,
- )
- try:
- try:
- next_request = await auth_flow.asend(response)
- except StopAsyncIteration:
- return response
-
- response.history = list(history)
- await response.aread()
- request = next_request
- history.append(response)
-
- except BaseException as exc:
- await response.aclose()
- raise exc
- finally:
- await auth_flow.aclose()
-
- async def _send_handling_redirects(
- self,
- request: Request,
- follow_redirects: bool,
- history: list[Response],
- ) -> Response:
- while True:
- if len(history) > self.max_redirects:
- raise TooManyRedirects(
- "Exceeded maximum allowed redirects.", request=request
- )
-
- for hook in self._event_hooks["request"]:
- await hook(request)
-
- response = await self._send_single_request(request)
- try:
- for hook in self._event_hooks["response"]:
- await hook(response)
-
- response.history = list(history)
-
- if not response.has_redirect_location:
- return response
-
- request = self._build_redirect_request(request, response)
- history = history + [response]
-
- if follow_redirects:
- await response.aread()
- else:
- response.next_request = request
- return response
-
- except BaseException as exc:
- await response.aclose()
- raise exc
-
- async def _send_single_request(self, request: Request) -> Response:
- """
- Sends a single request, without handling any redirections.
- """
- transport = self._transport_for_url(request.url)
- timer = Timer()
- await timer.async_start()
-
- if not isinstance(request.stream, AsyncByteStream):
- raise RuntimeError(
- "Attempted to send a sync request with an AsyncClient instance."
- ) - - with request_context(request=request): - response = await transport.handle_async_request(request) - - assert isinstance(response.stream, AsyncByteStream) - response.request = request - response.stream = BoundAsyncStream( - response.stream, response=response, timer=timer - ) - self.cookies.extract_cookies(response) - response.default_encoding = self._default_encoding - - logger.info( - 'HTTP Request: %s %s "%s %d %s"', - request.method, - request.url, - response.http_version, - response.status_code, - response.reason_phrase, - ) - - return response - - async def get( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault | None = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `GET` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "GET", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def options( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send an `OPTIONS` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "OPTIONS", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def head( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `HEAD` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "HEAD", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def post( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `POST` request. - - **Parameters**: See `httpx.request`. 
- """ - return await self.request( - "POST", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def put( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PUT` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "PUT", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def patch( - self, - url: URL | str, - *, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `PATCH` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "PATCH", - url, - content=content, - data=data, - files=files, - json=json, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def delete( - self, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - auth: AuthTypes | UseClientDefault = USE_CLIENT_DEFAULT, - follow_redirects: bool | UseClientDefault = USE_CLIENT_DEFAULT, - timeout: TimeoutTypes | UseClientDefault = USE_CLIENT_DEFAULT, - extensions: RequestExtensions | None = None, - ) -> Response: - """ - Send a `DELETE` request. - - **Parameters**: See `httpx.request`. - """ - return await self.request( - "DELETE", - url, - params=params, - headers=headers, - cookies=cookies, - auth=auth, - follow_redirects=follow_redirects, - timeout=timeout, - extensions=extensions, - ) - - async def aclose(self) -> None: - """ - Close transport and proxies. - """ - if self._state != ClientState.CLOSED: - self._state = ClientState.CLOSED - - await self._transport.aclose() - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.aclose() - - async def __aenter__(self: U) -> U: - if self._state != ClientState.UNOPENED: - msg = { - ClientState.OPENED: "Cannot open a client instance more than once.", - ClientState.CLOSED: ( - "Cannot reopen a client instance, once it has been closed." 
- ), - }[self._state] - raise RuntimeError(msg) - - self._state = ClientState.OPENED - - await self._transport.__aenter__() - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.__aenter__() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - self._state = ClientState.CLOSED - - await self._transport.__aexit__(exc_type, exc_value, traceback) - for proxy in self._mounts.values(): - if proxy is not None: - await proxy.__aexit__(exc_type, exc_value, traceback) diff --git a/myenv/Lib/site-packages/httpx/_compat.py b/myenv/Lib/site-packages/httpx/_compat.py deleted file mode 100644 index 7d86dce..0000000 --- a/myenv/Lib/site-packages/httpx/_compat.py +++ /dev/null @@ -1,63 +0,0 @@ -""" -The _compat module is used for code which requires branching between different -Python environments. It is excluded from the code coverage checks. -""" - -import re -import ssl -import sys -from types import ModuleType -from typing import Optional - -# Brotli support is optional -# The C bindings in `brotli` are recommended for CPython. -# The CFFI bindings in `brotlicffi` are recommended for PyPy and everything else. -try: - import brotlicffi as brotli -except ImportError: # pragma: no cover - try: - import brotli - except ImportError: - brotli = None - -# Zstandard support is optional -zstd: Optional[ModuleType] = None -try: - import zstandard as zstd -except (AttributeError, ImportError, ValueError): # Defensive: - zstd = None -else: - # The package 'zstandard' added the 'eof' property starting - # in v0.18.0 which we require to ensure a complete and - # valid zstd stream was fed into the ZstdDecoder. - # See: https://github.com/urllib3/urllib3/pull/2624 - _zstd_version = tuple( - map(int, re.search(r"^([0-9]+)\.([0-9]+)", zstd.__version__).groups()) # type: ignore[union-attr] - ) - if _zstd_version < (0, 18): # Defensive: - zstd = None - - -if sys.version_info >= (3, 10) or ssl.OPENSSL_VERSION_INFO >= (1, 1, 0, 7): - - def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: - # The OP_NO_SSL* and OP_NO_TLS* become deprecated in favor of - # 'SSLContext.minimum_version' from Python 3.7 onwards, however - # this attribute is not available unless the ssl module is compiled - # with OpenSSL 1.1.0g or newer. - # https://docs.python.org/3.10/library/ssl.html#ssl.SSLContext.minimum_version - # https://docs.python.org/3.7/library/ssl.html#ssl.SSLContext.minimum_version - context.minimum_version = ssl.TLSVersion.TLSv1_2 - -else: - - def set_minimum_tls_version_1_2(context: ssl.SSLContext) -> None: - # If 'minimum_version' isn't available, we configure these options with - # the older deprecated variants. 
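The `aclose()`/`__aenter__()`/`__aexit__()` trio defined above is why `async with` is the usual way to manage an `AsyncClient`: entering opens the transport, and leaving closes the pool plus any mounted proxy transports. A minimal sketch (the URL is a placeholder):

```python
import asyncio

import httpx


async def main() -> None:
    async with httpx.AsyncClient() as client:
        response = await client.get("https://www.example.org/")
        print(response.status_code)
    # Reusing the client here would raise RuntimeError: a closed
    # client cannot be reopened.


asyncio.run(main())
```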
- context.options |= ssl.OP_NO_SSLv2 - context.options |= ssl.OP_NO_SSLv3 - context.options |= ssl.OP_NO_TLSv1 - context.options |= ssl.OP_NO_TLSv1_1 - - -__all__ = ["brotli", "set_minimum_tls_version_1_2"] diff --git a/myenv/Lib/site-packages/httpx/_config.py b/myenv/Lib/site-packages/httpx/_config.py deleted file mode 100644 index 1b12911..0000000 --- a/myenv/Lib/site-packages/httpx/_config.py +++ /dev/null @@ -1,372 +0,0 @@ -from __future__ import annotations - -import logging -import os -import ssl -import typing -from pathlib import Path - -import certifi - -from ._compat import set_minimum_tls_version_1_2 -from ._models import Headers -from ._types import CertTypes, HeaderTypes, TimeoutTypes, VerifyTypes -from ._urls import URL -from ._utils import get_ca_bundle_from_env - -__all__ = ["Limits", "Proxy", "Timeout", "create_ssl_context"] - -DEFAULT_CIPHERS = ":".join( - [ - "ECDHE+AESGCM", - "ECDHE+CHACHA20", - "DHE+AESGCM", - "DHE+CHACHA20", - "ECDH+AESGCM", - "DH+AESGCM", - "ECDH+AES", - "DH+AES", - "RSA+AESGCM", - "RSA+AES", - "!aNULL", - "!eNULL", - "!MD5", - "!DSS", - ] -) - - -logger = logging.getLogger("httpx") - - -class UnsetType: - pass # pragma: no cover - - -UNSET = UnsetType() - - -def create_ssl_context( - cert: CertTypes | None = None, - verify: VerifyTypes = True, - trust_env: bool = True, - http2: bool = False, -) -> ssl.SSLContext: - return SSLConfig( - cert=cert, verify=verify, trust_env=trust_env, http2=http2 - ).ssl_context - - -class SSLConfig: - """ - SSL Configuration. - """ - - DEFAULT_CA_BUNDLE_PATH = Path(certifi.where()) - - def __init__( - self, - *, - cert: CertTypes | None = None, - verify: VerifyTypes = True, - trust_env: bool = True, - http2: bool = False, - ) -> None: - self.cert = cert - self.verify = verify - self.trust_env = trust_env - self.http2 = http2 - self.ssl_context = self.load_ssl_context() - - def load_ssl_context(self) -> ssl.SSLContext: - logger.debug( - "load_ssl_context verify=%r cert=%r trust_env=%r http2=%r", - self.verify, - self.cert, - self.trust_env, - self.http2, - ) - - if self.verify: - return self.load_ssl_context_verify() - return self.load_ssl_context_no_verify() - - def load_ssl_context_no_verify(self) -> ssl.SSLContext: - """ - Return an SSL context for unverified connections. - """ - context = self._create_default_ssl_context() - context.check_hostname = False - context.verify_mode = ssl.CERT_NONE - self._load_client_certs(context) - return context - - def load_ssl_context_verify(self) -> ssl.SSLContext: - """ - Return an SSL context for verified connections. - """ - if self.trust_env and self.verify is True: - ca_bundle = get_ca_bundle_from_env() - if ca_bundle is not None: - self.verify = ca_bundle - - if isinstance(self.verify, ssl.SSLContext): - # Allow passing in our own SSLContext object that's pre-configured. - context = self.verify - self._load_client_certs(context) - return context - elif isinstance(self.verify, bool): - ca_bundle_path = self.DEFAULT_CA_BUNDLE_PATH - elif Path(self.verify).exists(): - ca_bundle_path = Path(self.verify) - else: - raise IOError( - "Could not find a suitable TLS CA certificate bundle, " - "invalid path: {}".format(self.verify) - ) - - context = self._create_default_ssl_context() - context.verify_mode = ssl.CERT_REQUIRED - context.check_hostname = True - - # Signal to server support for PHA in TLS 1.3. Raises an - # AttributeError if only read-only access is implemented. 
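`create_ssl_context()` and `SSLConfig` above are the machinery behind the client's `verify=` argument, which accepts a boolean, a CA bundle path, or a ready-made `SSLContext`. A brief sketch of the three spellings (the bundle path is a placeholder):

```python
import httpx

# Default: verify against the bundled certifi CA store.
client = httpx.Client()

# Verify against a private CA bundle instead (placeholder path).
client = httpx.Client(verify="/etc/ssl/private-ca.pem")

# Full control: pass a pre-built SSLContext straight through.
context = httpx.create_ssl_context(verify=True, http2=False)
client = httpx.Client(verify=context)
```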
- try: - context.post_handshake_auth = True - except AttributeError: # pragma: no cover - pass - - # Disable using 'commonName' for SSLContext.check_hostname - # when the 'subjectAltName' extension isn't available. - try: - context.hostname_checks_common_name = False - except AttributeError: # pragma: no cover - pass - - if ca_bundle_path.is_file(): - cafile = str(ca_bundle_path) - logger.debug("load_verify_locations cafile=%r", cafile) - context.load_verify_locations(cafile=cafile) - elif ca_bundle_path.is_dir(): - capath = str(ca_bundle_path) - logger.debug("load_verify_locations capath=%r", capath) - context.load_verify_locations(capath=capath) - - self._load_client_certs(context) - - return context - - def _create_default_ssl_context(self) -> ssl.SSLContext: - """ - Creates the default SSLContext object that's used for both verified - and unverified connections. - """ - context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT) - set_minimum_tls_version_1_2(context) - context.options |= ssl.OP_NO_COMPRESSION - context.set_ciphers(DEFAULT_CIPHERS) - - if ssl.HAS_ALPN: - alpn_idents = ["http/1.1", "h2"] if self.http2 else ["http/1.1"] - context.set_alpn_protocols(alpn_idents) - - keylogfile = os.environ.get("SSLKEYLOGFILE") - if keylogfile and self.trust_env: - context.keylog_filename = keylogfile - - return context - - def _load_client_certs(self, ssl_context: ssl.SSLContext) -> None: - """ - Loads client certificates into our SSLContext object - """ - if self.cert is not None: - if isinstance(self.cert, str): - ssl_context.load_cert_chain(certfile=self.cert) - elif isinstance(self.cert, tuple) and len(self.cert) == 2: - ssl_context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1]) - elif isinstance(self.cert, tuple) and len(self.cert) == 3: - ssl_context.load_cert_chain( - certfile=self.cert[0], - keyfile=self.cert[1], - password=self.cert[2], - ) - - -class Timeout: - """ - Timeout configuration. - - **Usage**: - - Timeout(None) # No timeouts. - Timeout(5.0) # 5s timeout on all operations. - Timeout(None, connect=5.0) # 5s timeout on connect, no other timeouts. - Timeout(5.0, connect=10.0) # 10s timeout on connect. 5s timeout elsewhere. - Timeout(5.0, pool=None) # No timeout on acquiring connection from pool. - # 5s timeout elsewhere. - """ - - def __init__( - self, - timeout: TimeoutTypes | UnsetType = UNSET, - *, - connect: None | float | UnsetType = UNSET, - read: None | float | UnsetType = UNSET, - write: None | float | UnsetType = UNSET, - pool: None | float | UnsetType = UNSET, - ) -> None: - if isinstance(timeout, Timeout): - # Passed as a single explicit Timeout. - assert connect is UNSET - assert read is UNSET - assert write is UNSET - assert pool is UNSET - self.connect = timeout.connect # type: typing.Optional[float] - self.read = timeout.read # type: typing.Optional[float] - self.write = timeout.write # type: typing.Optional[float] - self.pool = timeout.pool # type: typing.Optional[float] - elif isinstance(timeout, tuple): - # Passed as a tuple. 
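The constructor branches around this point implement the spellings listed in the docstring above. A short sketch of constructing and applying them (the values are arbitrary):

```python
import httpx

timeout = httpx.Timeout(5.0)                # one default for all four phases
timeout = httpx.Timeout(5.0, connect=10.0)  # override just the connect phase
timeout = httpx.Timeout(connect=5.0, read=10.0, write=10.0, pool=None)

client = httpx.Client(timeout=timeout)
# A per-request value overrides the client-level default.
response = client.get("https://www.example.org/", timeout=httpx.Timeout(2.0))
```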
- self.connect = timeout[0] - self.read = timeout[1] - self.write = None if len(timeout) < 3 else timeout[2] - self.pool = None if len(timeout) < 4 else timeout[3] - elif not ( - isinstance(connect, UnsetType) - or isinstance(read, UnsetType) - or isinstance(write, UnsetType) - or isinstance(pool, UnsetType) - ): - self.connect = connect - self.read = read - self.write = write - self.pool = pool - else: - if isinstance(timeout, UnsetType): - raise ValueError( - "httpx.Timeout must either include a default, or set all " - "four parameters explicitly." - ) - self.connect = timeout if isinstance(connect, UnsetType) else connect - self.read = timeout if isinstance(read, UnsetType) else read - self.write = timeout if isinstance(write, UnsetType) else write - self.pool = timeout if isinstance(pool, UnsetType) else pool - - def as_dict(self) -> dict[str, float | None]: - return { - "connect": self.connect, - "read": self.read, - "write": self.write, - "pool": self.pool, - } - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, self.__class__) - and self.connect == other.connect - and self.read == other.read - and self.write == other.write - and self.pool == other.pool - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - if len({self.connect, self.read, self.write, self.pool}) == 1: - return f"{class_name}(timeout={self.connect})" - return ( - f"{class_name}(connect={self.connect}, " - f"read={self.read}, write={self.write}, pool={self.pool})" - ) - - -class Limits: - """ - Configuration for limits to various client behaviors. - - **Parameters:** - - * **max_connections** - The maximum number of concurrent connections that may be - established. - * **max_keepalive_connections** - Allow the connection pool to maintain - keep-alive connections below this point. Should be less than or equal - to `max_connections`. - * **keepalive_expiry** - Time limit on idle keep-alive connections in seconds. - """ - - def __init__( - self, - *, - max_connections: int | None = None, - max_keepalive_connections: int | None = None, - keepalive_expiry: float | None = 5.0, - ) -> None: - self.max_connections = max_connections - self.max_keepalive_connections = max_keepalive_connections - self.keepalive_expiry = keepalive_expiry - - def __eq__(self, other: typing.Any) -> bool: - return ( - isinstance(other, self.__class__) - and self.max_connections == other.max_connections - and self.max_keepalive_connections == other.max_keepalive_connections - and self.keepalive_expiry == other.keepalive_expiry - ) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - return ( - f"{class_name}(max_connections={self.max_connections}, " - f"max_keepalive_connections={self.max_keepalive_connections}, " - f"keepalive_expiry={self.keepalive_expiry})" - ) - - -class Proxy: - def __init__( - self, - url: URL | str, - *, - ssl_context: ssl.SSLContext | None = None, - auth: tuple[str, str] | None = None, - headers: HeaderTypes | None = None, - ) -> None: - url = URL(url) - headers = Headers(headers) - - if url.scheme not in ("http", "https", "socks5"): - raise ValueError(f"Unknown scheme for proxy URL {url!r}") - - if url.username or url.password: - # Remove any auth credentials from the URL. 
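`Proxy` above is usually created implicitly from the client's `proxy=` argument, but building it explicitly allows credentials and headers to be attached. A sketch (the URL and credentials are placeholders):

```python
import httpx

# Simple form: a URL string is promoted to a Proxy internally.
client = httpx.Client(proxy="http://proxy.example.com:3128")

# Explicit form; credentials are split out of the URL exactly as the
# constructor above does when they are embedded in it.
proxy = httpx.Proxy(url="http://proxy.example.com:3128", auth=("user", "s3cr3t"))
client = httpx.Client(proxy=proxy)
```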
- auth = (url.username, url.password) - url = url.copy_with(username=None, password=None) - - self.url = url - self.auth = auth - self.headers = headers - self.ssl_context = ssl_context - - @property - def raw_auth(self) -> tuple[bytes, bytes] | None: - # The proxy authentication as raw bytes. - return ( - None - if self.auth is None - else (self.auth[0].encode("utf-8"), self.auth[1].encode("utf-8")) - ) - - def __repr__(self) -> str: - # The authentication is represented with the password component masked. - auth = (self.auth[0], "********") if self.auth else None - - # Build a nice concise representation. - url_str = f"{str(self.url)!r}" - auth_str = f", auth={auth!r}" if auth else "" - headers_str = f", headers={dict(self.headers)!r}" if self.headers else "" - return f"Proxy({url_str}{auth_str}{headers_str})" - - -DEFAULT_TIMEOUT_CONFIG = Timeout(timeout=5.0) -DEFAULT_LIMITS = Limits(max_connections=100, max_keepalive_connections=20) -DEFAULT_MAX_REDIRECTS = 20 diff --git a/myenv/Lib/site-packages/httpx/_content.py b/myenv/Lib/site-packages/httpx/_content.py deleted file mode 100644 index 786699f..0000000 --- a/myenv/Lib/site-packages/httpx/_content.py +++ /dev/null @@ -1,238 +0,0 @@ -from __future__ import annotations - -import inspect -import warnings -from json import dumps as json_dumps -from typing import ( - Any, - AsyncIterable, - AsyncIterator, - Iterable, - Iterator, - Mapping, -) -from urllib.parse import urlencode - -from ._exceptions import StreamClosed, StreamConsumed -from ._multipart import MultipartStream -from ._types import ( - AsyncByteStream, - RequestContent, - RequestData, - RequestFiles, - ResponseContent, - SyncByteStream, -) -from ._utils import peek_filelike_length, primitive_value_to_str - -__all__ = ["ByteStream"] - - -class ByteStream(AsyncByteStream, SyncByteStream): - def __init__(self, stream: bytes) -> None: - self._stream = stream - - def __iter__(self) -> Iterator[bytes]: - yield self._stream - - async def __aiter__(self) -> AsyncIterator[bytes]: - yield self._stream - - -class IteratorByteStream(SyncByteStream): - CHUNK_SIZE = 65_536 - - def __init__(self, stream: Iterable[bytes]) -> None: - self._stream = stream - self._is_stream_consumed = False - self._is_generator = inspect.isgenerator(stream) - - def __iter__(self) -> Iterator[bytes]: - if self._is_stream_consumed and self._is_generator: - raise StreamConsumed() - - self._is_stream_consumed = True - if hasattr(self._stream, "read"): - # File-like interfaces should use 'read' directly. - chunk = self._stream.read(self.CHUNK_SIZE) - while chunk: - yield chunk - chunk = self._stream.read(self.CHUNK_SIZE) - else: - # Otherwise iterate. - for part in self._stream: - yield part - - -class AsyncIteratorByteStream(AsyncByteStream): - CHUNK_SIZE = 65_536 - - def __init__(self, stream: AsyncIterable[bytes]) -> None: - self._stream = stream - self._is_stream_consumed = False - self._is_generator = inspect.isasyncgen(stream) - - async def __aiter__(self) -> AsyncIterator[bytes]: - if self._is_stream_consumed and self._is_generator: - raise StreamConsumed() - - self._is_stream_consumed = True - if hasattr(self._stream, "aread"): - # File-like interfaces should use 'aread' directly. - chunk = await self._stream.aread(self.CHUNK_SIZE) - while chunk: - yield chunk - chunk = await self._stream.aread(self.CHUNK_SIZE) - else: - # Otherwise iterate. 
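These iterator byte streams are what make plain (async) generators usable as request bodies; with no length known up front, `encode_content()` further down falls back to `Transfer-Encoding: chunked`. A minimal sketch (the URL is a placeholder):

```python
import asyncio

import httpx


async def main() -> None:
    async def body():
        # No total length is known in advance, so the request is
        # sent with Transfer-Encoding: chunked.
        yield b"hello, "
        yield b"world"

    async with httpx.AsyncClient() as client:
        await client.post("https://www.example.org/upload", content=body())


asyncio.run(main())
```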
- async for part in self._stream: - yield part - - -class UnattachedStream(AsyncByteStream, SyncByteStream): - """ - If a request or response is serialized using pickle, then it is no longer - attached to a stream for I/O purposes. Any stream operations should result - in `httpx.StreamClosed`. - """ - - def __iter__(self) -> Iterator[bytes]: - raise StreamClosed() - - async def __aiter__(self) -> AsyncIterator[bytes]: - raise StreamClosed() - yield b"" # pragma: no cover - - -def encode_content( - content: str | bytes | Iterable[bytes] | AsyncIterable[bytes], -) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: - if isinstance(content, (bytes, str)): - body = content.encode("utf-8") if isinstance(content, str) else content - content_length = len(body) - headers = {"Content-Length": str(content_length)} if body else {} - return headers, ByteStream(body) - - elif isinstance(content, Iterable) and not isinstance(content, dict): - # `not isinstance(content, dict)` is a bit oddly specific, but it - # catches a case that's easy for users to make in error, and would - # otherwise pass through here, like any other bytes-iterable, - # because `dict` happens to be iterable. See issue #2491. - content_length_or_none = peek_filelike_length(content) - - if content_length_or_none is None: - headers = {"Transfer-Encoding": "chunked"} - else: - headers = {"Content-Length": str(content_length_or_none)} - return headers, IteratorByteStream(content) # type: ignore - - elif isinstance(content, AsyncIterable): - headers = {"Transfer-Encoding": "chunked"} - return headers, AsyncIteratorByteStream(content) - - raise TypeError(f"Unexpected type for 'content', {type(content)!r}") - - -def encode_urlencoded_data( - data: RequestData, -) -> tuple[dict[str, str], ByteStream]: - plain_data = [] - for key, value in data.items(): - if isinstance(value, (list, tuple)): - plain_data.extend([(key, primitive_value_to_str(item)) for item in value]) - else: - plain_data.append((key, primitive_value_to_str(value))) - body = urlencode(plain_data, doseq=True).encode("utf-8") - content_length = str(len(body)) - content_type = "application/x-www-form-urlencoded" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_multipart_data( - data: RequestData, files: RequestFiles, boundary: bytes | None -) -> tuple[dict[str, str], MultipartStream]: - multipart = MultipartStream(data=data, files=files, boundary=boundary) - headers = multipart.get_headers() - return headers, multipart - - -def encode_text(text: str) -> tuple[dict[str, str], ByteStream]: - body = text.encode("utf-8") - content_length = str(len(body)) - content_type = "text/plain; charset=utf-8" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_html(html: str) -> tuple[dict[str, str], ByteStream]: - body = html.encode("utf-8") - content_length = str(len(body)) - content_type = "text/html; charset=utf-8" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_json(json: Any) -> tuple[dict[str, str], ByteStream]: - body = json_dumps(json).encode("utf-8") - content_length = str(len(body)) - content_type = "application/json" - headers = {"Content-Length": content_length, "Content-Type": content_type} - return headers, ByteStream(body) - - -def encode_request( - content: RequestContent | None = None, - data: RequestData | None = None, - files: 
RequestFiles | None = None, - json: Any | None = None, - boundary: bytes | None = None, -) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: - """ - Handles encoding the given `content`, `data`, `files`, and `json`, - returning a two-tuple of (, ). - """ - if data is not None and not isinstance(data, Mapping): - # We prefer to separate `content=` - # for raw request content, and `data=
` for url encoded or - # multipart form content. - # - # However for compat with requests, we *do* still support - # `data=` usages. We deal with that case here, treating it - # as if `content=<...>` had been supplied instead. - message = "Use 'content=<...>' to upload raw bytes/text content." - warnings.warn(message, DeprecationWarning) - return encode_content(data) - - if content is not None: - return encode_content(content) - elif files: - return encode_multipart_data(data or {}, files, boundary) - elif data: - return encode_urlencoded_data(data) - elif json is not None: - return encode_json(json) - - return {}, ByteStream(b"") - - -def encode_response( - content: ResponseContent | None = None, - text: str | None = None, - html: str | None = None, - json: Any | None = None, -) -> tuple[dict[str, str], SyncByteStream | AsyncByteStream]: - """ - Handles encoding the given `content`, returning a two-tuple of - (, ). - """ - if content is not None: - return encode_content(content) - elif text is not None: - return encode_text(text) - elif html is not None: - return encode_html(html) - elif json is not None: - return encode_json(json) - - return {}, ByteStream(b"") diff --git a/myenv/Lib/site-packages/httpx/_decoders.py b/myenv/Lib/site-packages/httpx/_decoders.py deleted file mode 100644 index 62f2c0b..0000000 --- a/myenv/Lib/site-packages/httpx/_decoders.py +++ /dev/null @@ -1,371 +0,0 @@ -""" -Handlers for Content-Encoding. - -See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding -""" - -from __future__ import annotations - -import codecs -import io -import typing -import zlib - -from ._compat import brotli, zstd -from ._exceptions import DecodingError - - -class ContentDecoder: - def decode(self, data: bytes) -> bytes: - raise NotImplementedError() # pragma: no cover - - def flush(self) -> bytes: - raise NotImplementedError() # pragma: no cover - - -class IdentityDecoder(ContentDecoder): - """ - Handle unencoded data. - """ - - def decode(self, data: bytes) -> bytes: - return data - - def flush(self) -> bytes: - return b"" - - -class DeflateDecoder(ContentDecoder): - """ - Handle 'deflate' decoding. - - See: https://stackoverflow.com/questions/1838699 - """ - - def __init__(self) -> None: - self.first_attempt = True - self.decompressor = zlib.decompressobj() - - def decode(self, data: bytes) -> bytes: - was_first_attempt = self.first_attempt - self.first_attempt = False - try: - return self.decompressor.decompress(data) - except zlib.error as exc: - if was_first_attempt: - self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS) - return self.decode(data) - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - try: - return self.decompressor.flush() - except zlib.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class GZipDecoder(ContentDecoder): - """ - Handle 'gzip' decoding. - - See: https://stackoverflow.com/questions/1838699 - """ - - def __init__(self) -> None: - self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16) - - def decode(self, data: bytes) -> bytes: - try: - return self.decompressor.decompress(data) - except zlib.error as exc: - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - try: - return self.decompressor.flush() - except zlib.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class BrotliDecoder(ContentDecoder): - """ - Handle 'brotli' decoding. - - Requires `pip install brotlipy`. 
See: https://brotlipy.readthedocs.io/ - or `pip install brotli`. See https://github.com/google/brotli - Supports both 'brotlipy' and 'Brotli' packages since they share an import - name. The top branches are for 'brotlipy' and bottom branches for 'Brotli' - """ - - def __init__(self) -> None: - if brotli is None: # pragma: no cover - raise ImportError( - "Using 'BrotliDecoder', but neither of the 'brotlicffi' or 'brotli' " - "packages have been installed. " - "Make sure to install httpx using `pip install httpx[brotli]`." - ) from None - - self.decompressor = brotli.Decompressor() - self.seen_data = False - self._decompress: typing.Callable[[bytes], bytes] - if hasattr(self.decompressor, "decompress"): - # The 'brotlicffi' package. - self._decompress = self.decompressor.decompress # pragma: no cover - else: - # The 'brotli' package. - self._decompress = self.decompressor.process # pragma: no cover - - def decode(self, data: bytes) -> bytes: - if not data: - return b"" - self.seen_data = True - try: - return self._decompress(data) - except brotli.error as exc: - raise DecodingError(str(exc)) from exc - - def flush(self) -> bytes: - if not self.seen_data: - return b"" - try: - if hasattr(self.decompressor, "finish"): - # Only available in the 'brotlicffi' package. - - # As the decompressor decompresses eagerly, this - # will never actually emit any data. However, it will potentially throw - # errors if a truncated or damaged data stream has been used. - self.decompressor.finish() # pragma: no cover - return b"" - except brotli.error as exc: # pragma: no cover - raise DecodingError(str(exc)) from exc - - -class ZStandardDecoder(ContentDecoder): - """ - Handle 'zstd' RFC 8878 decoding. - - Requires `pip install zstandard`. - Can be installed as a dependency of httpx using `pip install httpx[zstd]`. - """ - - # inspired by the ZstdDecoder implementation in urllib3 - def __init__(self) -> None: - if zstd is None: # pragma: no cover - raise ImportError( - "Using 'ZStandardDecoder', ..." - "Make sure to install httpx using `pip install httpx[zstd]`." - ) from None - - self.decompressor = zstd.ZstdDecompressor().decompressobj() - - def decode(self, data: bytes) -> bytes: - assert zstd is not None - output = io.BytesIO() - try: - output.write(self.decompressor.decompress(data)) - while self.decompressor.eof and self.decompressor.unused_data: - unused_data = self.decompressor.unused_data - self.decompressor = zstd.ZstdDecompressor().decompressobj() - output.write(self.decompressor.decompress(unused_data)) - except zstd.ZstdError as exc: - raise DecodingError(str(exc)) from exc - return output.getvalue() - - def flush(self) -> bytes: - ret = self.decompressor.flush() # note: this is a no-op - if not self.decompressor.eof: - raise DecodingError("Zstandard data is incomplete") # pragma: no cover - return bytes(ret) - - -class MultiDecoder(ContentDecoder): - """ - Handle the case where multiple encodings have been applied. - """ - - def __init__(self, children: typing.Sequence[ContentDecoder]) -> None: - """ - 'children' should be a sequence of decoders in the order in which - each was applied. - """ - # Note that we reverse the order for decoding. 
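A worked example of that reversal: a body sent as `Content-Encoding: deflate, gzip` had deflate applied first and gzip second, so decoding must undo gzip first. A standalone sketch using this module's classes (they are httpx internals, imported from the private module purely for illustration):

```python
import gzip
import zlib

from httpx._decoders import DeflateDecoder, GZipDecoder, MultiDecoder

# deflate applied first, gzip applied second.
body = gzip.compress(zlib.compress(b"hello"))

# Children are passed in application order; MultiDecoder reverses
# them internally, so gzip is undone before deflate.
decoder = MultiDecoder(children=[DeflateDecoder(), GZipDecoder()])
assert decoder.decode(body) + decoder.flush() == b"hello"
```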
- self.children = list(reversed(children)) - - def decode(self, data: bytes) -> bytes: - for child in self.children: - data = child.decode(data) - return data - - def flush(self) -> bytes: - data = b"" - for child in self.children: - data = child.decode(data) + child.flush() - return data - - -class ByteChunker: - """ - Handles returning byte content in fixed-size chunks. - """ - - def __init__(self, chunk_size: int | None = None) -> None: - self._buffer = io.BytesIO() - self._chunk_size = chunk_size - - def decode(self, content: bytes) -> list[bytes]: - if self._chunk_size is None: - return [content] if content else [] - - self._buffer.write(content) - if self._buffer.tell() >= self._chunk_size: - value = self._buffer.getvalue() - chunks = [ - value[i : i + self._chunk_size] - for i in range(0, len(value), self._chunk_size) - ] - if len(chunks[-1]) == self._chunk_size: - self._buffer.seek(0) - self._buffer.truncate() - return chunks - else: - self._buffer.seek(0) - self._buffer.write(chunks[-1]) - self._buffer.truncate() - return chunks[:-1] - else: - return [] - - def flush(self) -> list[bytes]: - value = self._buffer.getvalue() - self._buffer.seek(0) - self._buffer.truncate() - return [value] if value else [] - - -class TextChunker: - """ - Handles returning text content in fixed-size chunks. - """ - - def __init__(self, chunk_size: int | None = None) -> None: - self._buffer = io.StringIO() - self._chunk_size = chunk_size - - def decode(self, content: str) -> list[str]: - if self._chunk_size is None: - return [content] if content else [] - - self._buffer.write(content) - if self._buffer.tell() >= self._chunk_size: - value = self._buffer.getvalue() - chunks = [ - value[i : i + self._chunk_size] - for i in range(0, len(value), self._chunk_size) - ] - if len(chunks[-1]) == self._chunk_size: - self._buffer.seek(0) - self._buffer.truncate() - return chunks - else: - self._buffer.seek(0) - self._buffer.write(chunks[-1]) - self._buffer.truncate() - return chunks[:-1] - else: - return [] - - def flush(self) -> list[str]: - value = self._buffer.getvalue() - self._buffer.seek(0) - self._buffer.truncate() - return [value] if value else [] - - -class TextDecoder: - """ - Handles incrementally decoding bytes into text - """ - - def __init__(self, encoding: str = "utf-8") -> None: - self.decoder = codecs.getincrementaldecoder(encoding)(errors="replace") - - def decode(self, data: bytes) -> str: - return self.decoder.decode(data) - - def flush(self) -> str: - return self.decoder.decode(b"", True) - - -class LineDecoder: - """ - Handles incrementally reading lines from text. - - Has the same behaviour as the stdllib splitlines, - but handling the input iteratively. - """ - - def __init__(self) -> None: - self.buffer: list[str] = [] - self.trailing_cr: bool = False - - def decode(self, text: str) -> list[str]: - # See https://docs.python.org/3/library/stdtypes.html#str.splitlines - NEWLINE_CHARS = "\n\r\x0b\x0c\x1c\x1d\x1e\x85\u2028\u2029" - - # We always push a trailing `\r` into the next decode iteration. - if self.trailing_cr: - text = "\r" + text - self.trailing_cr = False - if text.endswith("\r"): - self.trailing_cr = True - text = text[:-1] - - if not text: - # NOTE: the edge case input of empty text doesn't occur in practice, - # because other httpx internals filter out this value - return [] # pragma: no cover - - trailing_newline = text[-1] in NEWLINE_CHARS - lines = text.splitlines() - - if len(lines) == 1 and not trailing_newline: - # No new lines, buffer the input and continue. 
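The buffering behaviour here is easiest to see with partial writes: a fragment without a newline is held back until a later chunk, or `flush()`, completes it. A sketch against the class above (again a private httpx internal, imported for illustration only):

```python
from httpx._decoders import LineDecoder

decoder = LineDecoder()
assert decoder.decode("hel") == []             # no newline yet: buffered
assert decoder.decode("lo\nwor") == ["hello"]  # buffer + first line joined
assert decoder.flush() == ["wor"]              # flush drains the remainder
```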
- self.buffer.append(lines[0]) - return [] - - if self.buffer: - # Include any existing buffer in the first portion of the - # splitlines result. - lines = ["".join(self.buffer) + lines[0]] + lines[1:] - self.buffer = [] - - if not trailing_newline: - # If the last segment of splitlines is not newline terminated, - # then drop it from our output and start a new buffer. - self.buffer = [lines.pop()] - - return lines - - def flush(self) -> list[str]: - if not self.buffer and not self.trailing_cr: - return [] - - lines = ["".join(self.buffer)] - self.buffer = [] - self.trailing_cr = False - return lines - - -SUPPORTED_DECODERS = { - "identity": IdentityDecoder, - "gzip": GZipDecoder, - "deflate": DeflateDecoder, - "br": BrotliDecoder, - "zstd": ZStandardDecoder, -} - - -if brotli is None: - SUPPORTED_DECODERS.pop("br") # pragma: no cover -if zstd is None: - SUPPORTED_DECODERS.pop("zstd") # pragma: no cover diff --git a/myenv/Lib/site-packages/httpx/_exceptions.py b/myenv/Lib/site-packages/httpx/_exceptions.py deleted file mode 100644 index 77f45a6..0000000 --- a/myenv/Lib/site-packages/httpx/_exceptions.py +++ /dev/null @@ -1,379 +0,0 @@ -""" -Our exception hierarchy: - -* HTTPError - x RequestError - + TransportError - - TimeoutException - · ConnectTimeout - · ReadTimeout - · WriteTimeout - · PoolTimeout - - NetworkError - · ConnectError - · ReadError - · WriteError - · CloseError - - ProtocolError - · LocalProtocolError - · RemoteProtocolError - - ProxyError - - UnsupportedProtocol - + DecodingError - + TooManyRedirects - x HTTPStatusError -* InvalidURL -* CookieConflict -* StreamError - x StreamConsumed - x StreamClosed - x ResponseNotRead - x RequestNotRead -""" - -from __future__ import annotations - -import contextlib -import typing - -if typing.TYPE_CHECKING: - from ._models import Request, Response # pragma: no cover - -__all__ = [ - "CloseError", - "ConnectError", - "ConnectTimeout", - "CookieConflict", - "DecodingError", - "HTTPError", - "HTTPStatusError", - "InvalidURL", - "LocalProtocolError", - "NetworkError", - "PoolTimeout", - "ProtocolError", - "ProxyError", - "ReadError", - "ReadTimeout", - "RemoteProtocolError", - "RequestError", - "RequestNotRead", - "ResponseNotRead", - "StreamClosed", - "StreamConsumed", - "StreamError", - "TimeoutException", - "TooManyRedirects", - "TransportError", - "UnsupportedProtocol", - "WriteError", - "WriteTimeout", -] - - -class HTTPError(Exception): - """ - Base class for `RequestError` and `HTTPStatusError`. - - Useful for `try...except` blocks when issuing a request, - and then calling `.raise_for_status()`. - - For example: - - ``` - try: - response = httpx.get("https://www.example.com") - response.raise_for_status() - except httpx.HTTPError as exc: - print(f"HTTP Exception for {exc.request.url} - {exc}") - ``` - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - self._request: Request | None = None - - @property - def request(self) -> Request: - if self._request is None: - raise RuntimeError("The .request property has not been set.") - return self._request - - @request.setter - def request(self, request: Request) -> None: - self._request = request - - -class RequestError(HTTPError): - """ - Base class for all exceptions that may occur when issuing a `.request()`. - """ - - def __init__(self, message: str, *, request: Request | None = None) -> None: - super().__init__(message) - # At the point an exception is raised we won't typically have a request - # instance to associate it with. 
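In application code, the hierarchy sketched at the top of this module means exceptions can be caught at whatever granularity is useful, with `.request` available on request errors. A sketch (the URL is a placeholder):

```python
import httpx

try:
    response = httpx.get("https://www.example.org/")
    response.raise_for_status()
except httpx.TimeoutException as exc:
    # Covers ConnectTimeout, ReadTimeout, WriteTimeout and PoolTimeout.
    print(f"Timed out: {exc.request.url}")
except httpx.RequestError as exc:
    print(f"Request to {exc.request.url} failed: {exc}")
except httpx.HTTPStatusError as exc:
    print(f"Server returned {exc.response.status_code}")
```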
- # - # The 'request_context' context manager is used within the Client and - # Response methods in order to ensure that any raised exceptions - # have a `.request` property set on them. - self._request = request - - -class TransportError(RequestError): - """ - Base class for all exceptions that occur at the level of the Transport API. - """ - - -# Timeout exceptions... - - -class TimeoutException(TransportError): - """ - The base class for timeout errors. - - An operation has timed out. - """ - - -class ConnectTimeout(TimeoutException): - """ - Timed out while connecting to the host. - """ - - -class ReadTimeout(TimeoutException): - """ - Timed out while receiving data from the host. - """ - - -class WriteTimeout(TimeoutException): - """ - Timed out while sending data to the host. - """ - - -class PoolTimeout(TimeoutException): - """ - Timed out waiting to acquire a connection from the pool. - """ - - -# Core networking exceptions... - - -class NetworkError(TransportError): - """ - The base class for network-related errors. - - An error occurred while interacting with the network. - """ - - -class ReadError(NetworkError): - """ - Failed to receive data from the network. - """ - - -class WriteError(NetworkError): - """ - Failed to send data through the network. - """ - - -class ConnectError(NetworkError): - """ - Failed to establish a connection. - """ - - -class CloseError(NetworkError): - """ - Failed to close a connection. - """ - - -# Other transport exceptions... - - -class ProxyError(TransportError): - """ - An error occurred while establishing a proxy connection. - """ - - -class UnsupportedProtocol(TransportError): - """ - Attempted to make a request to an unsupported protocol. - - For example issuing a request to `ftp://www.example.com`. - """ - - -class ProtocolError(TransportError): - """ - The protocol was violated. - """ - - -class LocalProtocolError(ProtocolError): - """ - A protocol was violated by the client. - - For example if the user instantiated a `Request` instance explicitly, - failed to include the mandatory `Host:` header, and then issued it directly - using `client.send()`. - """ - - -class RemoteProtocolError(ProtocolError): - """ - The protocol was violated by the server. - - For example, returning malformed HTTP. - """ - - -# Other request exceptions... - - -class DecodingError(RequestError): - """ - Decoding of the response failed, due to a malformed encoding. - """ - - -class TooManyRedirects(RequestError): - """ - Too many redirects. - """ - - -# Client errors - - -class HTTPStatusError(HTTPError): - """ - The response had an error HTTP status of 4xx or 5xx. - - May be raised when calling `response.raise_for_status()` - """ - - def __init__(self, message: str, *, request: Request, response: Response) -> None: - super().__init__(message) - self.request = request - self.response = response - - -class InvalidURL(Exception): - """ - URL is improperly formed or cannot be parsed. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -class CookieConflict(Exception): - """ - Attempted to lookup a cookie by name, but multiple cookies existed. - - Can occur when calling `response.cookies.get(...)`. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -# Stream exceptions... - -# These may occur as the result of a programming error, by accessing -# the request/response stream in an invalid manner. - - -class StreamError(RuntimeError): - """ - The base class for stream exceptions. 
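The most common way to meet these stream exceptions is reading a streamed response in the wrong order. A sketch of the trigger and the fix (the URL is a placeholder):

```python
import httpx

with httpx.Client() as client:
    with client.stream("GET", "https://www.example.org/") as response:
        # Touching response.text at this point would raise
        # ResponseNotRead, because the body has not been loaded yet.
        response.read()       # explicitly load the body...
        print(response.text)  # ...after which .text is available.
```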
- - The developer made an error in accessing the request stream in - an invalid way. - """ - - def __init__(self, message: str) -> None: - super().__init__(message) - - -class StreamConsumed(StreamError): - """ - Attempted to read or stream content, but the content has already - been streamed. - """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream some content, but the content has " - "already been streamed. For requests, this could be due to passing " - "a generator as request content, and then receiving a redirect " - "response or a secondary request as part of an authentication flow." - "For responses, this could be due to attempting to stream the response " - "content more than once." - ) - super().__init__(message) - - -class StreamClosed(StreamError): - """ - Attempted to read or stream response content, but the request has been - closed. - """ - - def __init__(self) -> None: - message = ( - "Attempted to read or stream content, but the stream has " "been closed." - ) - super().__init__(message) - - -class ResponseNotRead(StreamError): - """ - Attempted to access streaming response content, without having called `read()`. - """ - - def __init__(self) -> None: - message = ( - "Attempted to access streaming response content," - " without having called `read()`." - ) - super().__init__(message) - - -class RequestNotRead(StreamError): - """ - Attempted to access streaming request content, without having called `read()`. - """ - - def __init__(self) -> None: - message = ( - "Attempted to access streaming request content," - " without having called `read()`." - ) - super().__init__(message) - - -@contextlib.contextmanager -def request_context( - request: Request | None = None, -) -> typing.Iterator[None]: - """ - A context manager that can be used to attach the given request context - to any `RequestError` exceptions that are raised within the block. - """ - try: - yield - except RequestError as exc: - if request is not None: - exc.request = request - raise exc diff --git a/myenv/Lib/site-packages/httpx/_main.py b/myenv/Lib/site-packages/httpx/_main.py deleted file mode 100644 index 72657f8..0000000 --- a/myenv/Lib/site-packages/httpx/_main.py +++ /dev/null @@ -1,509 +0,0 @@ -from __future__ import annotations - -import functools -import json -import sys -import typing - -import click -import httpcore -import pygments.lexers -import pygments.util -import rich.console -import rich.markup -import rich.progress -import rich.syntax -import rich.table - -from ._client import Client -from ._exceptions import RequestError -from ._models import Response -from ._status_codes import codes - - -def print_help() -> None: - console = rich.console.Console() - - console.print("[bold]HTTPX :butterfly:", justify="center") - console.print() - console.print("A next generation HTTP client.", justify="center") - console.print() - console.print( - "Usage: [bold]httpx[/bold] [cyan] [OPTIONS][/cyan] ", justify="left" - ) - console.print() - - table = rich.table.Table.grid(padding=1, pad_edge=True) - table.add_column("Parameter", no_wrap=True, justify="left", style="bold") - table.add_column("Description") - table.add_row( - "-m, --method [cyan]METHOD", - "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD.\n" - "[Default: GET, or POST if a request body is included]", - ) - table.add_row( - "-p, --params [cyan] ...", - "Query parameters to include in the request URL.", - ) - table.add_row( - "-c, --content [cyan]TEXT", "Byte content to include in the request body." 
- ) - table.add_row( - "-d, --data [cyan] ...", "Form data to include in the request body." - ) - table.add_row( - "-f, --files [cyan] ...", - "Form files to include in the request body.", - ) - table.add_row("-j, --json [cyan]TEXT", "JSON data to include in the request body.") - table.add_row( - "-h, --headers [cyan] ...", - "Include additional HTTP headers in the request.", - ) - table.add_row( - "--cookies [cyan] ...", "Cookies to include in the request." - ) - table.add_row( - "--auth [cyan]", - "Username and password to include in the request. Specify '-' for the password" - " to use a password prompt. Note that using --verbose/-v will expose" - " the Authorization header, including the password encoding" - " in a trivially reversible format.", - ) - - table.add_row( - "--proxy [cyan]URL", - "Send the request via a proxy. Should be the URL giving the proxy address.", - ) - - table.add_row( - "--timeout [cyan]FLOAT", - "Timeout value to use for network operations, such as establishing the" - " connection, reading some data, etc... [Default: 5.0]", - ) - - table.add_row("--follow-redirects", "Automatically follow redirects.") - table.add_row("--no-verify", "Disable SSL verification.") - table.add_row( - "--http2", "Send the request using HTTP/2, if the remote server supports it." - ) - - table.add_row( - "--download [cyan]FILE", - "Save the response content as a file, rather than displaying it.", - ) - - table.add_row("-v, --verbose", "Verbose output. Show request as well as response.") - table.add_row("--help", "Show this message and exit.") - console.print(table) - - -def get_lexer_for_response(response: Response) -> str: - content_type = response.headers.get("Content-Type") - if content_type is not None: - mime_type, _, _ = content_type.partition(";") - try: - return typing.cast( - str, pygments.lexers.get_lexer_for_mimetype(mime_type.strip()).name - ) - except pygments.util.ClassNotFound: # pragma: no cover - pass - return "" # pragma: no cover - - -def format_request_headers(request: httpcore.Request, http2: bool = False) -> str: - version = "HTTP/2" if http2 else "HTTP/1.1" - headers = [ - (name.lower() if http2 else name, value) for name, value in request.headers - ] - method = request.method.decode("ascii") - target = request.url.target.decode("ascii") - lines = [f"{method} {target} {version}"] + [ - f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers - ] - return "\n".join(lines) - - -def format_response_headers( - http_version: bytes, - status: int, - reason_phrase: bytes | None, - headers: list[tuple[bytes, bytes]], -) -> str: - version = http_version.decode("ascii") - reason = ( - codes.get_reason_phrase(status) - if reason_phrase is None - else reason_phrase.decode("ascii") - ) - lines = [f"{version} {status} {reason}"] + [ - f"{name.decode('ascii')}: {value.decode('ascii')}" for name, value in headers - ] - return "\n".join(lines) - - -def print_request_headers(request: httpcore.Request, http2: bool = False) -> None: - console = rich.console.Console() - http_text = format_request_headers(request, http2=http2) - syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - - -def print_response_headers( - http_version: bytes, - status: int, - reason_phrase: bytes | None, - headers: list[tuple[bytes, bytes]], -) -> None: - console = rich.console.Console() - http_text = 
format_response_headers(http_version, status, reason_phrase, headers) - syntax = rich.syntax.Syntax(http_text, "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - syntax = rich.syntax.Syntax("", "http", theme="ansi_dark", word_wrap=True) - console.print(syntax) - - -def print_response(response: Response) -> None: - console = rich.console.Console() - lexer_name = get_lexer_for_response(response) - if lexer_name: - if lexer_name.lower() == "json": - try: - data = response.json() - text = json.dumps(data, indent=4) - except ValueError: # pragma: no cover - text = response.text - else: - text = response.text - - syntax = rich.syntax.Syntax(text, lexer_name, theme="ansi_dark", word_wrap=True) - console.print(syntax) - else: - console.print(f"<{len(response.content)} bytes of binary data>") - - -_PCTRTT = typing.Tuple[typing.Tuple[str, str], ...] -_PCTRTTT = typing.Tuple[_PCTRTT, ...] -_PeerCertRetDictType = typing.Dict[str, typing.Union[str, _PCTRTTT, _PCTRTT]] - - -def format_certificate(cert: _PeerCertRetDictType) -> str: # pragma: no cover - lines = [] - for key, value in cert.items(): - if isinstance(value, (list, tuple)): - lines.append(f"* {key}:") - for item in value: - if key in ("subject", "issuer"): - for sub_item in item: - lines.append(f"* {sub_item[0]}: {sub_item[1]!r}") - elif isinstance(item, tuple) and len(item) == 2: - lines.append(f"* {item[0]}: {item[1]!r}") - else: - lines.append(f"* {item!r}") - else: - lines.append(f"* {key}: {value!r}") - return "\n".join(lines) - - -def trace( - name: str, info: typing.Mapping[str, typing.Any], verbose: bool = False -) -> None: - console = rich.console.Console() - if name == "connection.connect_tcp.started" and verbose: - host = info["host"] - console.print(f"* Connecting to {host!r}") - elif name == "connection.connect_tcp.complete" and verbose: - stream = info["return_value"] - server_addr = stream.get_extra_info("server_addr") - console.print(f"* Connected to {server_addr[0]!r} on port {server_addr[1]}") - elif name == "connection.start_tls.complete" and verbose: # pragma: no cover - stream = info["return_value"] - ssl_object = stream.get_extra_info("ssl_object") - version = ssl_object.version() - cipher = ssl_object.cipher() - server_cert = ssl_object.getpeercert() - alpn = ssl_object.selected_alpn_protocol() - console.print(f"* SSL established using {version!r} / {cipher[0]!r}") - console.print(f"* Selected ALPN protocol: {alpn!r}") - if server_cert: - console.print("* Server certificate:") - console.print(format_certificate(server_cert)) - elif name == "http11.send_request_headers.started" and verbose: - request = info["request"] - print_request_headers(request, http2=False) - elif name == "http2.send_request_headers.started" and verbose: # pragma: no cover - request = info["request"] - print_request_headers(request, http2=True) - elif name == "http11.receive_response_headers.complete": - http_version, status, reason_phrase, headers = info["return_value"] - print_response_headers(http_version, status, reason_phrase, headers) - elif name == "http2.receive_response_headers.complete": # pragma: no cover - status, headers = info["return_value"] - http_version = b"HTTP/2" - reason_phrase = None - print_response_headers(http_version, status, reason_phrase, headers) - - -def download_response(response: Response, download: typing.BinaryIO) -> None: - console = rich.console.Console() - console.print() - content_length = response.headers.get("Content-Length") - with rich.progress.Progress( - 
"[progress.description]{task.description}", - "[progress.percentage]{task.percentage:>3.0f}%", - rich.progress.BarColumn(bar_width=None), - rich.progress.DownloadColumn(), - rich.progress.TransferSpeedColumn(), - ) as progress: - description = f"Downloading [bold]{rich.markup.escape(download.name)}" - download_task = progress.add_task( - description, - total=int(content_length or 0), - start=content_length is not None, - ) - for chunk in response.iter_bytes(): - download.write(chunk) - progress.update(download_task, completed=response.num_bytes_downloaded) - - -def validate_json( - ctx: click.Context, - param: click.Option | click.Parameter, - value: typing.Any, -) -> typing.Any: - if value is None: - return None - - try: - return json.loads(value) - except json.JSONDecodeError: # pragma: no cover - raise click.BadParameter("Not valid JSON") - - -def validate_auth( - ctx: click.Context, - param: click.Option | click.Parameter, - value: typing.Any, -) -> typing.Any: - if value == (None, None): - return None - - username, password = value - if password == "-": # pragma: no cover - password = click.prompt("Password", hide_input=True) - return (username, password) - - -def handle_help( - ctx: click.Context, - param: click.Option | click.Parameter, - value: typing.Any, -) -> None: - if not value or ctx.resilient_parsing: - return - - print_help() - ctx.exit() - - -@click.command(add_help_option=False) -@click.argument("url", type=str) -@click.option( - "--method", - "-m", - "method", - type=str, - help=( - "Request method, such as GET, POST, PUT, PATCH, DELETE, OPTIONS, HEAD. " - "[Default: GET, or POST if a request body is included]" - ), -) -@click.option( - "--params", - "-p", - "params", - type=(str, str), - multiple=True, - help="Query parameters to include in the request URL.", -) -@click.option( - "--content", - "-c", - "content", - type=str, - help="Byte content to include in the request body.", -) -@click.option( - "--data", - "-d", - "data", - type=(str, str), - multiple=True, - help="Form data to include in the request body.", -) -@click.option( - "--files", - "-f", - "files", - type=(str, click.File(mode="rb")), - multiple=True, - help="Form files to include in the request body.", -) -@click.option( - "--json", - "-j", - "json", - type=str, - callback=validate_json, - help="JSON data to include in the request body.", -) -@click.option( - "--headers", - "-h", - "headers", - type=(str, str), - multiple=True, - help="Include additional HTTP headers in the request.", -) -@click.option( - "--cookies", - "cookies", - type=(str, str), - multiple=True, - help="Cookies to include in the request.", -) -@click.option( - "--auth", - "auth", - type=(str, str), - default=(None, None), - callback=validate_auth, - help=( - "Username and password to include in the request. " - "Specify '-' for the password to use a password prompt. " - "Note that using --verbose/-v will expose the Authorization header, " - "including the password encoding in a trivially reversible format." - ), -) -@click.option( - "--proxy", - "proxy", - type=str, - default=None, - help="Send the request via a proxy. Should be the URL giving the proxy address.", -) -@click.option( - "--timeout", - "timeout", - type=float, - default=5.0, - help=( - "Timeout value to use for network operations, such as establishing the " - "connection, reading some data, etc... 
[Default: 5.0]" - ), -) -@click.option( - "--follow-redirects", - "follow_redirects", - is_flag=True, - default=False, - help="Automatically follow redirects.", -) -@click.option( - "--no-verify", - "verify", - is_flag=True, - default=True, - help="Disable SSL verification.", -) -@click.option( - "--http2", - "http2", - type=bool, - is_flag=True, - default=False, - help="Send the request using HTTP/2, if the remote server supports it.", -) -@click.option( - "--download", - type=click.File("wb"), - help="Save the response content as a file, rather than displaying it.", -) -@click.option( - "--verbose", - "-v", - type=bool, - is_flag=True, - default=False, - help="Verbose. Show request as well as response.", -) -@click.option( - "--help", - is_flag=True, - is_eager=True, - expose_value=False, - callback=handle_help, - help="Show this message and exit.", -) -def main( - url: str, - method: str, - params: list[tuple[str, str]], - content: str, - data: list[tuple[str, str]], - files: list[tuple[str, click.File]], - json: str, - headers: list[tuple[str, str]], - cookies: list[tuple[str, str]], - auth: tuple[str, str] | None, - proxy: str, - timeout: float, - follow_redirects: bool, - verify: bool, - http2: bool, - download: typing.BinaryIO | None, - verbose: bool, -) -> None: - """ - An HTTP command line client. - Sends a request and displays the response. - """ - if not method: - method = "POST" if content or data or files or json else "GET" - - try: - with Client( - proxy=proxy, - timeout=timeout, - verify=verify, - http2=http2, - ) as client: - with client.stream( - method, - url, - params=list(params), - content=content, - data=dict(data), - files=files, # type: ignore - json=json, - headers=headers, - cookies=dict(cookies), - auth=auth, - follow_redirects=follow_redirects, - extensions={"trace": functools.partial(trace, verbose=verbose)}, - ) as response: - if download is not None: - download_response(response, download) - else: - response.read() - if response.content: - print_response(response) - - except RequestError as exc: - console = rich.console.Console() - console.print(f"[red]{type(exc).__name__}[/red]: {exc}") - sys.exit(1) - - sys.exit(0 if response.is_success else 1) diff --git a/myenv/Lib/site-packages/httpx/_models.py b/myenv/Lib/site-packages/httpx/_models.py deleted file mode 100644 index 01d9583..0000000 --- a/myenv/Lib/site-packages/httpx/_models.py +++ /dev/null @@ -1,1211 +0,0 @@ -from __future__ import annotations - -import datetime -import email.message -import json as jsonlib -import typing -import urllib.request -from collections.abc import Mapping -from http.cookiejar import Cookie, CookieJar - -from ._content import ByteStream, UnattachedStream, encode_request, encode_response -from ._decoders import ( - SUPPORTED_DECODERS, - ByteChunker, - ContentDecoder, - IdentityDecoder, - LineDecoder, - MultiDecoder, - TextChunker, - TextDecoder, -) -from ._exceptions import ( - CookieConflict, - HTTPStatusError, - RequestNotRead, - ResponseNotRead, - StreamClosed, - StreamConsumed, - request_context, -) -from ._multipart import get_multipart_boundary_from_content_type -from ._status_codes import codes -from ._types import ( - AsyncByteStream, - CookieTypes, - HeaderTypes, - QueryParamTypes, - RequestContent, - RequestData, - RequestExtensions, - RequestFiles, - ResponseContent, - ResponseExtensions, - SyncByteStream, -) -from ._urls import URL -from ._utils import ( - is_known_encoding, - normalize_header_key, - normalize_header_value, - obfuscate_sensitive_headers, - 
parse_content_type_charset, - parse_header_links, -) - -__all__ = ["Cookies", "Headers", "Request", "Response"] - - -class Headers(typing.MutableMapping[str, str]): - """ - HTTP headers, as a case-insensitive multi-dict. - """ - - def __init__( - self, - headers: HeaderTypes | None = None, - encoding: str | None = None, - ) -> None: - if headers is None: - self._list = [] # type: typing.List[typing.Tuple[bytes, bytes, bytes]] - elif isinstance(headers, Headers): - self._list = list(headers._list) - elif isinstance(headers, Mapping): - self._list = [ - ( - normalize_header_key(k, lower=False, encoding=encoding), - normalize_header_key(k, lower=True, encoding=encoding), - normalize_header_value(v, encoding), - ) - for k, v in headers.items() - ] - else: - self._list = [ - ( - normalize_header_key(k, lower=False, encoding=encoding), - normalize_header_key(k, lower=True, encoding=encoding), - normalize_header_value(v, encoding), - ) - for k, v in headers - ] - - self._encoding = encoding - - @property - def encoding(self) -> str: - """ - Header encoding is mandated as ascii, but we allow fallbacks to utf-8 - or iso-8859-1. - """ - if self._encoding is None: - for encoding in ["ascii", "utf-8"]: - for key, value in self.raw: - try: - key.decode(encoding) - value.decode(encoding) - except UnicodeDecodeError: - break - else: - # The else block runs if 'break' did not occur, meaning - # all values fitted the encoding. - self._encoding = encoding - break - else: - # The ISO-8859-1 encoding covers all 256 code points in a byte, - # so will never raise decode errors. - self._encoding = "iso-8859-1" - return self._encoding - - @encoding.setter - def encoding(self, value: str) -> None: - self._encoding = value - - @property - def raw(self) -> list[tuple[bytes, bytes]]: - """ - Returns a list of the raw header items, as byte pairs. - """ - return [(raw_key, value) for raw_key, _, value in self._list] - - def keys(self) -> typing.KeysView[str]: - return {key.decode(self.encoding): None for _, key, value in self._list}.keys() - - def values(self) -> typing.ValuesView[str]: - values_dict: dict[str, str] = {} - for _, key, value in self._list: - str_key = key.decode(self.encoding) - str_value = value.decode(self.encoding) - if str_key in values_dict: - values_dict[str_key] += f", {str_value}" - else: - values_dict[str_key] = str_value - return values_dict.values() - - def items(self) -> typing.ItemsView[str, str]: - """ - Return `(key, value)` items of headers. Concatenate headers - into a single comma separated value when a key occurs multiple times. - """ - values_dict: dict[str, str] = {} - for _, key, value in self._list: - str_key = key.decode(self.encoding) - str_value = value.decode(self.encoding) - if str_key in values_dict: - values_dict[str_key] += f", {str_value}" - else: - values_dict[str_key] = str_value - return values_dict.items() - - def multi_items(self) -> list[tuple[str, str]]: - """ - Return a list of `(key, value)` pairs of headers. Allow multiple - occurrences of the same key without concatenating into a single - comma separated value. - """ - return [ - (key.decode(self.encoding), value.decode(self.encoding)) - for _, key, value in self._list - ] - - def get(self, key: str, default: typing.Any = None) -> typing.Any: - """ - Return a header value. If multiple occurrences of the header occur - then concatenate them together with commas. 
- """ - try: - return self[key] - except KeyError: - return default - - def get_list(self, key: str, split_commas: bool = False) -> list[str]: - """ - Return a list of all header values for a given key. - If `split_commas=True` is passed, then any comma separated header - values are split into multiple return strings. - """ - get_header_key = key.lower().encode(self.encoding) - - values = [ - item_value.decode(self.encoding) - for _, item_key, item_value in self._list - if item_key.lower() == get_header_key - ] - - if not split_commas: - return values - - split_values = [] - for value in values: - split_values.extend([item.strip() for item in value.split(",")]) - return split_values - - def update(self, headers: HeaderTypes | None = None) -> None: # type: ignore - headers = Headers(headers) - for key in headers.keys(): - if key in self: - self.pop(key) - self._list.extend(headers._list) - - def copy(self) -> Headers: - return Headers(self, encoding=self.encoding) - - def __getitem__(self, key: str) -> str: - """ - Return a single header value. - - If there are multiple headers with the same key, then we concatenate - them with commas. See: https://tools.ietf.org/html/rfc7230#section-3.2.2 - """ - normalized_key = key.lower().encode(self.encoding) - - items = [ - header_value.decode(self.encoding) - for _, header_key, header_value in self._list - if header_key == normalized_key - ] - - if items: - return ", ".join(items) - - raise KeyError(key) - - def __setitem__(self, key: str, value: str) -> None: - """ - Set the header `key` to `value`, removing any duplicate entries. - Retains insertion order. - """ - set_key = key.encode(self._encoding or "utf-8") - set_value = value.encode(self._encoding or "utf-8") - lookup_key = set_key.lower() - - found_indexes = [ - idx - for idx, (_, item_key, _) in enumerate(self._list) - if item_key == lookup_key - ] - - for idx in reversed(found_indexes[1:]): - del self._list[idx] - - if found_indexes: - idx = found_indexes[0] - self._list[idx] = (set_key, lookup_key, set_value) - else: - self._list.append((set_key, lookup_key, set_value)) - - def __delitem__(self, key: str) -> None: - """ - Remove the header `key`. 
- """ - del_key = key.lower().encode(self.encoding) - - pop_indexes = [ - idx - for idx, (_, item_key, _) in enumerate(self._list) - if item_key.lower() == del_key - ] - - if not pop_indexes: - raise KeyError(key) - - for idx in reversed(pop_indexes): - del self._list[idx] - - def __contains__(self, key: typing.Any) -> bool: - header_key = key.lower().encode(self.encoding) - return header_key in [key for _, key, _ in self._list] - - def __iter__(self) -> typing.Iterator[typing.Any]: - return iter(self.keys()) - - def __len__(self) -> int: - return len(self._list) - - def __eq__(self, other: typing.Any) -> bool: - try: - other_headers = Headers(other) - except ValueError: - return False - - self_list = [(key, value) for _, key, value in self._list] - other_list = [(key, value) for _, key, value in other_headers._list] - return sorted(self_list) == sorted(other_list) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - - encoding_str = "" - if self.encoding != "ascii": - encoding_str = f", encoding={self.encoding!r}" - - as_list = list(obfuscate_sensitive_headers(self.multi_items())) - as_dict = dict(as_list) - - no_duplicate_keys = len(as_dict) == len(as_list) - if no_duplicate_keys: - return f"{class_name}({as_dict!r}{encoding_str})" - return f"{class_name}({as_list!r}{encoding_str})" - - -class Request: - def __init__( - self, - method: str | bytes, - url: URL | str, - *, - params: QueryParamTypes | None = None, - headers: HeaderTypes | None = None, - cookies: CookieTypes | None = None, - content: RequestContent | None = None, - data: RequestData | None = None, - files: RequestFiles | None = None, - json: typing.Any | None = None, - stream: SyncByteStream | AsyncByteStream | None = None, - extensions: RequestExtensions | None = None, - ) -> None: - self.method = ( - method.decode("ascii").upper() - if isinstance(method, bytes) - else method.upper() - ) - self.url = URL(url) - if params is not None: - self.url = self.url.copy_merge_params(params=params) - self.headers = Headers(headers) - self.extensions = {} if extensions is None else extensions - - if cookies: - Cookies(cookies).set_cookie_header(self) - - if stream is None: - content_type: str | None = self.headers.get("content-type") - headers, stream = encode_request( - content=content, - data=data, - files=files, - json=json, - boundary=get_multipart_boundary_from_content_type( - content_type=content_type.encode(self.headers.encoding) - if content_type - else None - ), - ) - self._prepare(headers) - self.stream = stream - # Load the request body, except for streaming content. - if isinstance(stream, ByteStream): - self.read() - else: - # There's an important distinction between `Request(content=...)`, - # and `Request(stream=...)`. - # - # Using `content=...` implies automatically populated `Host` and content - # headers, of either `Content-Length: ...` or `Transfer-Encoding: chunked`. - # - # Using `stream=...` will not automatically include *any* - # auto-populated headers. - # - # As an end-user you don't really need `stream=...`. It's only - # useful when: - # - # * Preserving the request stream when copying requests, eg for redirects. - # * Creating request instances on the *server-side* of the transport API. - self.stream = stream - - def _prepare(self, default_headers: dict[str, str]) -> None: - for key, value in default_headers.items(): - # Ignore Transfer-Encoding if the Content-Length has been set explicitly. 
- if key.lower() == "transfer-encoding" and "Content-Length" in self.headers: - continue - self.headers.setdefault(key, value) - - auto_headers: list[tuple[bytes, bytes]] = [] - - has_host = "Host" in self.headers - has_content_length = ( - "Content-Length" in self.headers or "Transfer-Encoding" in self.headers - ) - - if not has_host and self.url.host: - auto_headers.append((b"Host", self.url.netloc)) - if not has_content_length and self.method in ("POST", "PUT", "PATCH"): - auto_headers.append((b"Content-Length", b"0")) - - self.headers = Headers(auto_headers + self.headers.raw) - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - raise RequestNotRead() - return self._content - - def read(self) -> bytes: - """ - Read and return the request content. - """ - if not hasattr(self, "_content"): - assert isinstance(self.stream, typing.Iterable) - self._content = b"".join(self.stream) - if not isinstance(self.stream, ByteStream): - # If a streaming request has been read entirely into memory, then - # we can replace the stream with a raw bytes implementation, - # to ensure that any non-replayable streams can still be used. - self.stream = ByteStream(self._content) - return self._content - - async def aread(self) -> bytes: - """ - Read and return the request content. - """ - if not hasattr(self, "_content"): - assert isinstance(self.stream, typing.AsyncIterable) - self._content = b"".join([part async for part in self.stream]) - if not isinstance(self.stream, ByteStream): - # If a streaming request has been read entirely into memory, then - # we can replace the stream with a raw bytes implementation, - # to ensure that any non-replayable streams can still be used. - self.stream = ByteStream(self._content) - return self._content - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - url = str(self.url) - return f"<{class_name}({self.method!r}, {url!r})>" - - def __getstate__(self) -> dict[str, typing.Any]: - return { - name: value - for name, value in self.__dict__.items() - if name not in ["extensions", "stream"] - } - - def __setstate__(self, state: dict[str, typing.Any]) -> None: - for name, value in state.items(): - setattr(self, name, value) - self.extensions = {} - self.stream = UnattachedStream() - - -class Response: - def __init__( - self, - status_code: int, - *, - headers: HeaderTypes | None = None, - content: ResponseContent | None = None, - text: str | None = None, - html: str | None = None, - json: typing.Any = None, - stream: SyncByteStream | AsyncByteStream | None = None, - request: Request | None = None, - extensions: ResponseExtensions | None = None, - history: list[Response] | None = None, - default_encoding: str | typing.Callable[[bytes], str] = "utf-8", - ) -> None: - self.status_code = status_code - self.headers = Headers(headers) - - self._request: Request | None = request - - # When follow_redirects=False and a redirect is received, - # the client will set `response.next_request`. - self.next_request: Request | None = None - - self.extensions: ResponseExtensions = {} if extensions is None else extensions - self.history = [] if history is None else list(history) - - self.is_closed = False - self.is_stream_consumed = False - - self.default_encoding = default_encoding - - if stream is None: - headers, stream = encode_response(content, text, html, json) - self._prepare(headers) - self.stream = stream - if isinstance(stream, ByteStream): - # Load the response body, except for streaming content. 
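The `_prepare()` step above is what auto-populates `Host` and the content headers on an ordinary request, and a request built with `content=`/`json=` is read eagerly. A short sketch of the observable effect (hypothetical URL and payload; assumes `httpx` is installed):

```python
import httpx

request = httpx.Request("POST", "https://example.org/things", json={"name": "box"})

# The body was loaded eagerly, and default headers were auto-populated.
print(request.headers["Host"])          # "example.org"
print(request.headers["Content-Type"])  # "application/json"
print(request.read())                   # b'{"name":"box"}' (JSON separators vary by version)
```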
- self.read() - else: - # There's an important distinction between `Response(content=...)`, - # and `Response(stream=...)`. - # - # Using `content=...` implies automatically populated content headers, - # of either `Content-Length: ...` or `Transfer-Encoding: chunked`. - # - # Using `stream=...` will not automatically include any content headers. - # - # As an end-user you don't really need `stream=...`. It's only - # useful when creating response instances having received a stream - # from the transport API. - self.stream = stream - - self._num_bytes_downloaded = 0 - - def _prepare(self, default_headers: dict[str, str]) -> None: - for key, value in default_headers.items(): - # Ignore Transfer-Encoding if the Content-Length has been set explicitly. - if key.lower() == "transfer-encoding" and "content-length" in self.headers: - continue - self.headers.setdefault(key, value) - - @property - def elapsed(self) -> datetime.timedelta: - """ - Returns the time taken for the complete request/response - cycle to complete. - """ - if not hasattr(self, "_elapsed"): - raise RuntimeError( - "'.elapsed' may only be accessed after the response " - "has been read or closed." - ) - return self._elapsed - - @elapsed.setter - def elapsed(self, elapsed: datetime.timedelta) -> None: - self._elapsed = elapsed - - @property - def request(self) -> Request: - """ - Returns the request instance associated to the current response. - """ - if self._request is None: - raise RuntimeError( - "The request instance has not been set on this response." - ) - return self._request - - @request.setter - def request(self, value: Request) -> None: - self._request = value - - @property - def http_version(self) -> str: - try: - http_version: bytes = self.extensions["http_version"] - except KeyError: - return "HTTP/1.1" - else: - return http_version.decode("ascii", errors="ignore") - - @property - def reason_phrase(self) -> str: - try: - reason_phrase: bytes = self.extensions["reason_phrase"] - except KeyError: - return codes.get_reason_phrase(self.status_code) - else: - return reason_phrase.decode("ascii", errors="ignore") - - @property - def url(self) -> URL: - """ - Returns the URL for which the request was made. - """ - return self.request.url - - @property - def content(self) -> bytes: - if not hasattr(self, "_content"): - raise ResponseNotRead() - return self._content - - @property - def text(self) -> str: - if not hasattr(self, "_text"): - content = self.content - if not content: - self._text = "" - else: - decoder = TextDecoder(encoding=self.encoding or "utf-8") - self._text = "".join([decoder.decode(self.content), decoder.flush()]) - return self._text - - @property - def encoding(self) -> str | None: - """ - Return an encoding to use for decoding the byte content into text. - The priority for determining this is given by... - - * `.encoding = <>` has been set explicitly. - * The encoding as specified by the charset parameter in the Content-Type header. - * The encoding as determined by `default_encoding`, which may either be - a string like "utf-8" indicating the encoding to use, or may be a callable - which enables charset autodetection. 
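A small illustration of that priority order, using the public `Response` constructor (hypothetical content; assumes `httpx` is installed):

```python
import httpx

# A charset in the Content-Type header takes priority...
response = httpx.Response(
    200,
    headers={"Content-Type": "text/plain; charset=iso-8859-1"},
    content="café".encode("iso-8859-1"),
)
print(response.encoding)  # "iso-8859-1"
print(response.text)      # "café"

# ...otherwise `default_encoding` is used as the fallback.
response = httpx.Response(200, content=b"plain bytes", default_encoding="utf-8")
print(response.encoding)  # "utf-8"
```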
- """ - if not hasattr(self, "_encoding"): - encoding = self.charset_encoding - if encoding is None or not is_known_encoding(encoding): - if isinstance(self.default_encoding, str): - encoding = self.default_encoding - elif hasattr(self, "_content"): - encoding = self.default_encoding(self._content) - self._encoding = encoding or "utf-8" - return self._encoding - - @encoding.setter - def encoding(self, value: str) -> None: - """ - Set the encoding to use for decoding the byte content into text. - - If the `text` attribute has been accessed, attempting to set the - encoding will throw a ValueError. - """ - if hasattr(self, "_text"): - raise ValueError( - "Setting encoding after `text` has been accessed is not allowed." - ) - self._encoding = value - - @property - def charset_encoding(self) -> str | None: - """ - Return the encoding, as specified by the Content-Type header. - """ - content_type = self.headers.get("Content-Type") - if content_type is None: - return None - - return parse_content_type_charset(content_type) - - def _get_content_decoder(self) -> ContentDecoder: - """ - Returns a decoder instance which can be used to decode the raw byte - content, depending on the Content-Encoding used in the response. - """ - if not hasattr(self, "_decoder"): - decoders: list[ContentDecoder] = [] - values = self.headers.get_list("content-encoding", split_commas=True) - for value in values: - value = value.strip().lower() - try: - decoder_cls = SUPPORTED_DECODERS[value] - decoders.append(decoder_cls()) - except KeyError: - continue - - if len(decoders) == 1: - self._decoder = decoders[0] - elif len(decoders) > 1: - self._decoder = MultiDecoder(children=decoders) - else: - self._decoder = IdentityDecoder() - - return self._decoder - - @property - def is_informational(self) -> bool: - """ - A property which is `True` for 1xx status codes, `False` otherwise. - """ - return codes.is_informational(self.status_code) - - @property - def is_success(self) -> bool: - """ - A property which is `True` for 2xx status codes, `False` otherwise. - """ - return codes.is_success(self.status_code) - - @property - def is_redirect(self) -> bool: - """ - A property which is `True` for 3xx status codes, `False` otherwise. - - Note that not all responses with a 3xx status code indicate a URL redirect. - - Use `response.has_redirect_location` to determine responses with a properly - formed URL redirection. - """ - return codes.is_redirect(self.status_code) - - @property - def is_client_error(self) -> bool: - """ - A property which is `True` for 4xx status codes, `False` otherwise. - """ - return codes.is_client_error(self.status_code) - - @property - def is_server_error(self) -> bool: - """ - A property which is `True` for 5xx status codes, `False` otherwise. - """ - return codes.is_server_error(self.status_code) - - @property - def is_error(self) -> bool: - """ - A property which is `True` for 4xx and 5xx status codes, `False` otherwise. - """ - return codes.is_error(self.status_code) - - @property - def has_redirect_location(self) -> bool: - """ - Returns True for 3xx responses with a properly formed URL redirection, - `False` otherwise. - """ - return ( - self.status_code - in ( - # 301 (Cacheable redirect. Method may change to GET.) - codes.MOVED_PERMANENTLY, - # 302 (Uncacheable redirect. Method may change to GET.) - codes.FOUND, - # 303 (Client should make a GET or HEAD request.) - codes.SEE_OTHER, - # 307 (Equiv. 302, but retain method) - codes.TEMPORARY_REDIRECT, - # 308 (Equiv. 
301, but retain method) - codes.PERMANENT_REDIRECT, - ) - and "Location" in self.headers - ) - - def raise_for_status(self) -> Response: - """ - Raise the `HTTPStatusError` if one occurred. - """ - request = self._request - if request is None: - raise RuntimeError( - "Cannot call `raise_for_status` as the request " - "instance has not been set on this response." - ) - - if self.is_success: - return self - - if self.has_redirect_location: - message = ( - "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" - "Redirect location: '{0.headers[location]}'\n" - "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" - ) - else: - message = ( - "{error_type} '{0.status_code} {0.reason_phrase}' for url '{0.url}'\n" - "For more information check: https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/{0.status_code}" - ) - - status_class = self.status_code // 100 - error_types = { - 1: "Informational response", - 3: "Redirect response", - 4: "Client error", - 5: "Server error", - } - error_type = error_types.get(status_class, "Invalid status code") - message = message.format(self, error_type=error_type) - raise HTTPStatusError(message, request=request, response=self) - - def json(self, **kwargs: typing.Any) -> typing.Any: - return jsonlib.loads(self.content, **kwargs) - - @property - def cookies(self) -> Cookies: - if not hasattr(self, "_cookies"): - self._cookies = Cookies() - self._cookies.extract_cookies(self) - return self._cookies - - @property - def links(self) -> dict[str | None, dict[str, str]]: - """ - Returns the parsed header links of the response, if any - """ - header = self.headers.get("link") - if header is None: - return {} - - return { - (link.get("rel") or link.get("url")): link - for link in parse_header_links(header) - } - - @property - def num_bytes_downloaded(self) -> int: - return self._num_bytes_downloaded - - def __repr__(self) -> str: - return f"<Response [{self.status_code} {self.reason_phrase}]>" - - def __getstate__(self) -> dict[str, typing.Any]: - return { - name: value - for name, value in self.__dict__.items() - if name not in ["extensions", "stream", "is_closed", "_decoder"] - } - - def __setstate__(self, state: dict[str, typing.Any]) -> None: - for name, value in state.items(): - setattr(self, name, value) - self.is_closed = True - self.extensions = {} - self.stream = UnattachedStream() - - def read(self) -> bytes: - """ - Read and return the response content. - """ - if not hasattr(self, "_content"): - self._content = b"".join(self.iter_bytes()) - return self._content - - def iter_bytes(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: - """ - A byte-iterator over the decoded response content. - This allows us to handle gzip, deflate, brotli, and zstd encoded responses. 
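A sketch of `raise_for_status()` and the status-code properties in use (hypothetical request; assumes `httpx` is installed):

```python
import httpx

response = httpx.Response(
    404, request=httpx.Request("GET", "https://example.org/missing")
)

print(response.is_client_error)  # True
print(response.reason_phrase)    # "Not Found"

try:
    response.raise_for_status()
except httpx.HTTPStatusError as exc:
    print(exc.response.status_code)  # 404
```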
- """ - if hasattr(self, "_content"): - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), max(chunk_size, 1)): - yield self._content[i : i + chunk_size] - else: - decoder = self._get_content_decoder() - chunker = ByteChunker(chunk_size=chunk_size) - with request_context(request=self._request): - for raw_bytes in self.iter_raw(): - decoded = decoder.decode(raw_bytes) - for chunk in chunker.decode(decoded): - yield chunk - decoded = decoder.flush() - for chunk in chunker.decode(decoded): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - def iter_text(self, chunk_size: int | None = None) -> typing.Iterator[str]: - """ - A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. - """ - decoder = TextDecoder(encoding=self.encoding or "utf-8") - chunker = TextChunker(chunk_size=chunk_size) - with request_context(request=self._request): - for byte_content in self.iter_bytes(): - text_content = decoder.decode(byte_content) - for chunk in chunker.decode(text_content): - yield chunk - text_content = decoder.flush() - for chunk in chunker.decode(text_content): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - def iter_lines(self) -> typing.Iterator[str]: - decoder = LineDecoder() - with request_context(request=self._request): - for text in self.iter_text(): - for line in decoder.decode(text): - yield line - for line in decoder.flush(): - yield line - - def iter_raw(self, chunk_size: int | None = None) -> typing.Iterator[bytes]: - """ - A byte-iterator over the raw response content. - """ - if self.is_stream_consumed: - raise StreamConsumed() - if self.is_closed: - raise StreamClosed() - if not isinstance(self.stream, SyncByteStream): - raise RuntimeError("Attempted to call a sync iterator on an async stream.") - - self.is_stream_consumed = True - self._num_bytes_downloaded = 0 - chunker = ByteChunker(chunk_size=chunk_size) - - with request_context(request=self._request): - for raw_stream_bytes in self.stream: - self._num_bytes_downloaded += len(raw_stream_bytes) - for chunk in chunker.decode(raw_stream_bytes): - yield chunk - - for chunk in chunker.flush(): - yield chunk - - self.close() - - def close(self) -> None: - """ - Close the response and release the connection. - Automatically called if the response body is read to completion. - """ - if not isinstance(self.stream, SyncByteStream): - raise RuntimeError("Attempted to call an sync close on an async stream.") - - if not self.is_closed: - self.is_closed = True - with request_context(request=self._request): - self.stream.close() - - async def aread(self) -> bytes: - """ - Read and return the response content. - """ - if not hasattr(self, "_content"): - self._content = b"".join([part async for part in self.aiter_bytes()]) - return self._content - - async def aiter_bytes( - self, chunk_size: int | None = None - ) -> typing.AsyncIterator[bytes]: - """ - A byte-iterator over the decoded response content. - This allows us to handle gzip, deflate, brotli, and zstd encoded responses. 
- """ - if hasattr(self, "_content"): - chunk_size = len(self._content) if chunk_size is None else chunk_size - for i in range(0, len(self._content), max(chunk_size, 1)): - yield self._content[i : i + chunk_size] - else: - decoder = self._get_content_decoder() - chunker = ByteChunker(chunk_size=chunk_size) - with request_context(request=self._request): - async for raw_bytes in self.aiter_raw(): - decoded = decoder.decode(raw_bytes) - for chunk in chunker.decode(decoded): - yield chunk - decoded = decoder.flush() - for chunk in chunker.decode(decoded): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - async def aiter_text( - self, chunk_size: int | None = None - ) -> typing.AsyncIterator[str]: - """ - A str-iterator over the decoded response content - that handles both gzip, deflate, etc but also detects the content's - string encoding. - """ - decoder = TextDecoder(encoding=self.encoding or "utf-8") - chunker = TextChunker(chunk_size=chunk_size) - with request_context(request=self._request): - async for byte_content in self.aiter_bytes(): - text_content = decoder.decode(byte_content) - for chunk in chunker.decode(text_content): - yield chunk - text_content = decoder.flush() - for chunk in chunker.decode(text_content): - yield chunk # pragma: no cover - for chunk in chunker.flush(): - yield chunk - - async def aiter_lines(self) -> typing.AsyncIterator[str]: - decoder = LineDecoder() - with request_context(request=self._request): - async for text in self.aiter_text(): - for line in decoder.decode(text): - yield line - for line in decoder.flush(): - yield line - - async def aiter_raw( - self, chunk_size: int | None = None - ) -> typing.AsyncIterator[bytes]: - """ - A byte-iterator over the raw response content. - """ - if self.is_stream_consumed: - raise StreamConsumed() - if self.is_closed: - raise StreamClosed() - if not isinstance(self.stream, AsyncByteStream): - raise RuntimeError("Attempted to call an async iterator on an sync stream.") - - self.is_stream_consumed = True - self._num_bytes_downloaded = 0 - chunker = ByteChunker(chunk_size=chunk_size) - - with request_context(request=self._request): - async for raw_stream_bytes in self.stream: - self._num_bytes_downloaded += len(raw_stream_bytes) - for chunk in chunker.decode(raw_stream_bytes): - yield chunk - - for chunk in chunker.flush(): - yield chunk - - await self.aclose() - - async def aclose(self) -> None: - """ - Close the response and release the connection. - Automatically called if the response body is read to completion. - """ - if not isinstance(self.stream, AsyncByteStream): - raise RuntimeError("Attempted to call an async close on an sync stream.") - - if not self.is_closed: - self.is_closed = True - with request_context(request=self._request): - await self.stream.aclose() - - -class Cookies(typing.MutableMapping[str, str]): - """ - HTTP Cookies, as a mutable mapping. - """ - - def __init__(self, cookies: CookieTypes | None = None) -> None: - if cookies is None or isinstance(cookies, dict): - self.jar = CookieJar() - if isinstance(cookies, dict): - for key, value in cookies.items(): - self.set(key, value) - elif isinstance(cookies, list): - self.jar = CookieJar() - for key, value in cookies: - self.set(key, value) - elif isinstance(cookies, Cookies): - self.jar = CookieJar() - for cookie in cookies.jar: - self.jar.set_cookie(cookie) - else: - self.jar = cookies - - def extract_cookies(self, response: Response) -> None: - """ - Loads any cookies based on the response `Set-Cookie` headers. 
- """ - urllib_response = self._CookieCompatResponse(response) - urllib_request = self._CookieCompatRequest(response.request) - - self.jar.extract_cookies(urllib_response, urllib_request) # type: ignore - - def set_cookie_header(self, request: Request) -> None: - """ - Sets an appropriate 'Cookie:' HTTP header on the `Request`. - """ - urllib_request = self._CookieCompatRequest(request) - self.jar.add_cookie_header(urllib_request) - - def set(self, name: str, value: str, domain: str = "", path: str = "/") -> None: - """ - Set a cookie value by name. May optionally include domain and path. - """ - kwargs = { - "version": 0, - "name": name, - "value": value, - "port": None, - "port_specified": False, - "domain": domain, - "domain_specified": bool(domain), - "domain_initial_dot": domain.startswith("."), - "path": path, - "path_specified": bool(path), - "secure": False, - "expires": None, - "discard": True, - "comment": None, - "comment_url": None, - "rest": {"HttpOnly": None}, - "rfc2109": False, - } - cookie = Cookie(**kwargs) # type: ignore - self.jar.set_cookie(cookie) - - def get( # type: ignore - self, - name: str, - default: str | None = None, - domain: str | None = None, - path: str | None = None, - ) -> str | None: - """ - Get a cookie by name. May optionally include domain and path - in order to specify exactly which cookie to retrieve. - """ - value = None - for cookie in self.jar: - if cookie.name == name: - if domain is None or cookie.domain == domain: - if path is None or cookie.path == path: - if value is not None: - message = f"Multiple cookies exist with name={name}" - raise CookieConflict(message) - value = cookie.value - - if value is None: - return default - return value - - def delete( - self, - name: str, - domain: str | None = None, - path: str | None = None, - ) -> None: - """ - Delete a cookie by name. May optionally include domain and path - in order to specify exactly which cookie to delete. - """ - if domain is not None and path is not None: - return self.jar.clear(domain, path, name) - - remove = [ - cookie - for cookie in self.jar - if cookie.name == name - and (domain is None or cookie.domain == domain) - and (path is None or cookie.path == path) - ] - - for cookie in remove: - self.jar.clear(cookie.domain, cookie.path, cookie.name) - - def clear(self, domain: str | None = None, path: str | None = None) -> None: - """ - Delete all cookies. Optionally include a domain and path in - order to only delete a subset of all the cookies. 
- """ - args = [] - if domain is not None: - args.append(domain) - if path is not None: - assert domain is not None - args.append(path) - self.jar.clear(*args) - - def update(self, cookies: CookieTypes | None = None) -> None: # type: ignore - cookies = Cookies(cookies) - for cookie in cookies.jar: - self.jar.set_cookie(cookie) - - def __setitem__(self, name: str, value: str) -> None: - return self.set(name, value) - - def __getitem__(self, name: str) -> str: - value = self.get(name) - if value is None: - raise KeyError(name) - return value - - def __delitem__(self, name: str) -> None: - return self.delete(name) - - def __len__(self) -> int: - return len(self.jar) - - def __iter__(self) -> typing.Iterator[str]: - return (cookie.name for cookie in self.jar) - - def __bool__(self) -> bool: - for _ in self.jar: - return True - return False - - def __repr__(self) -> str: - cookies_repr = ", ".join( - [ - f"" - for cookie in self.jar - ] - ) - - return f"" - - class _CookieCompatRequest(urllib.request.Request): - """ - Wraps a `Request` instance up in a compatibility interface suitable - for use with `CookieJar` operations. - """ - - def __init__(self, request: Request) -> None: - super().__init__( - url=str(request.url), - headers=dict(request.headers), - method=request.method, - ) - self.request = request - - def add_unredirected_header(self, key: str, value: str) -> None: - super().add_unredirected_header(key, value) - self.request.headers[key] = value - - class _CookieCompatResponse: - """ - Wraps a `Request` instance up in a compatibility interface suitable - for use with `CookieJar` operations. - """ - - def __init__(self, response: Response) -> None: - self.response = response - - def info(self) -> email.message.Message: - info = email.message.Message() - for key, value in self.response.headers.multi_items(): - # Note that setting `info[key]` here is an "append" operation, - # not a "replace" operation. - # https://docs.python.org/3/library/email.compat32-message.html#email.message.Message.__setitem__ - info[key] = value - return info diff --git a/myenv/Lib/site-packages/httpx/_multipart.py b/myenv/Lib/site-packages/httpx/_multipart.py deleted file mode 100644 index 8edb622..0000000 --- a/myenv/Lib/site-packages/httpx/_multipart.py +++ /dev/null @@ -1,269 +0,0 @@ -from __future__ import annotations - -import io -import os -import typing -from pathlib import Path - -from ._types import ( - AsyncByteStream, - FileContent, - FileTypes, - RequestData, - RequestFiles, - SyncByteStream, -) -from ._utils import ( - format_form_param, - guess_content_type, - peek_filelike_length, - primitive_value_to_str, - to_bytes, -) - - -def get_multipart_boundary_from_content_type( - content_type: bytes | None, -) -> bytes | None: - if not content_type or not content_type.startswith(b"multipart/form-data"): - return None - # parse boundary according to - # https://www.rfc-editor.org/rfc/rfc2046#section-5.1.1 - if b";" in content_type: - for section in content_type.split(b";"): - if section.strip().lower().startswith(b"boundary="): - return section.strip()[len(b"boundary=") :].strip(b'"') - return None - - -class DataField: - """ - A single form field item, within a multipart form field. - """ - - def __init__(self, name: str, value: str | bytes | int | float | None) -> None: - if not isinstance(name, str): - raise TypeError( - f"Invalid type for name. 
Expected str, got {type(name)}: {name!r}" - ) - if value is not None and not isinstance(value, (str, bytes, int, float)): - raise TypeError( - "Invalid type for value. Expected primitive type," - f" got {type(value)}: {value!r}" - ) - self.name = name - self.value: str | bytes = ( - value if isinstance(value, bytes) else primitive_value_to_str(value) - ) - - def render_headers(self) -> bytes: - if not hasattr(self, "_headers"): - name = format_form_param("name", self.name) - self._headers = b"".join( - [b"Content-Disposition: form-data; ", name, b"\r\n\r\n"] - ) - - return self._headers - - def render_data(self) -> bytes: - if not hasattr(self, "_data"): - self._data = to_bytes(self.value) - - return self._data - - def get_length(self) -> int: - headers = self.render_headers() - data = self.render_data() - return len(headers) + len(data) - - def render(self) -> typing.Iterator[bytes]: - yield self.render_headers() - yield self.render_data() - - -class FileField: - """ - A single file field item, within a multipart form field. - """ - - CHUNK_SIZE = 64 * 1024 - - def __init__(self, name: str, value: FileTypes) -> None: - self.name = name - - fileobj: FileContent - - headers: dict[str, str] = {} - content_type: str | None = None - - # This large tuple based API largely mirror's requests' API - # It would be good to think of better APIs for this that we could - # include in httpx 2.0 since variable length tuples(especially of 4 elements) - # are quite unwieldly - if isinstance(value, tuple): - if len(value) == 2: - # neither the 3rd parameter (content_type) nor the 4th (headers) - # was included - filename, fileobj = value - elif len(value) == 3: - filename, fileobj, content_type = value - else: - # all 4 parameters included - filename, fileobj, content_type, headers = value # type: ignore - else: - filename = Path(str(getattr(value, "name", "upload"))).name - fileobj = value - - if content_type is None: - content_type = guess_content_type(filename) - - has_content_type_header = any("content-type" in key.lower() for key in headers) - if content_type is not None and not has_content_type_header: - # note that unlike requests, we ignore the content_type provided in the 3rd - # tuple element if it is also included in the headers requests does - # the opposite (it overwrites the headerwith the 3rd tuple element) - headers["Content-Type"] = content_type - - if isinstance(fileobj, io.StringIO): - raise TypeError( - "Multipart file uploads require 'io.BytesIO', not 'io.StringIO'." - ) - if isinstance(fileobj, io.TextIOBase): - raise TypeError( - "Multipart file uploads must be opened in binary mode, not text mode." - ) - - self.filename = filename - self.file = fileobj - self.headers = headers - - def get_length(self) -> int | None: - headers = self.render_headers() - - if isinstance(self.file, (str, bytes)): - return len(headers) + len(to_bytes(self.file)) - - file_length = peek_filelike_length(self.file) - - # If we can't determine the filesize without reading it into memory, - # then return `None` here, to indicate an unknown file length. 
- if file_length is None: - return None - - return len(headers) + file_length - - def render_headers(self) -> bytes: - if not hasattr(self, "_headers"): - parts = [ - b"Content-Disposition: form-data; ", - format_form_param("name", self.name), - ] - if self.filename: - filename = format_form_param("filename", self.filename) - parts.extend([b"; ", filename]) - for header_name, header_value in self.headers.items(): - key, val = f"\r\n{header_name}: ".encode(), header_value.encode() - parts.extend([key, val]) - parts.append(b"\r\n\r\n") - self._headers = b"".join(parts) - - return self._headers - - def render_data(self) -> typing.Iterator[bytes]: - if isinstance(self.file, (str, bytes)): - yield to_bytes(self.file) - return - - if hasattr(self.file, "seek"): - try: - self.file.seek(0) - except io.UnsupportedOperation: - pass - - chunk = self.file.read(self.CHUNK_SIZE) - while chunk: - yield to_bytes(chunk) - chunk = self.file.read(self.CHUNK_SIZE) - - def render(self) -> typing.Iterator[bytes]: - yield self.render_headers() - yield from self.render_data() - - -class MultipartStream(SyncByteStream, AsyncByteStream): - """ - Request content as streaming multipart encoded form data. - """ - - def __init__( - self, - data: RequestData, - files: RequestFiles, - boundary: bytes | None = None, - ) -> None: - if boundary is None: - boundary = os.urandom(16).hex().encode("ascii") - - self.boundary = boundary - self.content_type = "multipart/form-data; boundary=%s" % boundary.decode( - "ascii" - ) - self.fields = list(self._iter_fields(data, files)) - - def _iter_fields( - self, data: RequestData, files: RequestFiles - ) -> typing.Iterator[FileField | DataField]: - for name, value in data.items(): - if isinstance(value, (tuple, list)): - for item in value: - yield DataField(name=name, value=item) - else: - yield DataField(name=name, value=value) - - file_items = files.items() if isinstance(files, typing.Mapping) else files - for name, value in file_items: - yield FileField(name=name, value=value) - - def iter_chunks(self) -> typing.Iterator[bytes]: - for field in self.fields: - yield b"--%s\r\n" % self.boundary - yield from field.render() - yield b"\r\n" - yield b"--%s--\r\n" % self.boundary - - def get_content_length(self) -> int | None: - """ - Return the length of the multipart encoded content, or `None` if - any of the files have a length that cannot be determined upfront. - """ - boundary_length = len(self.boundary) - length = 0 - - for field in self.fields: - field_length = field.get_length() - if field_length is None: - return None - - length += 2 + boundary_length + 2 # b"--{boundary}\r\n" - length += field_length - length += 2 # b"\r\n" - - length += 2 + boundary_length + 4 # b"--{boundary}--\r\n" - return length - - # Content stream interface. 
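`MultipartStream` itself is internal, but its effect is visible when a request is built with `data=` and `files=` together. A sketch (hypothetical field names and URL; assumes `httpx` is installed):

```python
import io
import httpx

request = httpx.Request(
    "POST",
    "https://example.org/upload",
    data={"field": "value"},
    files={"file": ("report.txt", io.BytesIO(b"file content"), "text/plain")},
)

# The encoder chose a boundary and set a multipart Content-Type header.
print(request.headers["Content-Type"])     # multipart/form-data; boundary=...
print(b"file content" in request.read())   # True
```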
- - def get_headers(self) -> dict[str, str]: - content_length = self.get_content_length() - content_type = self.content_type - if content_length is None: - return {"Transfer-Encoding": "chunked", "Content-Type": content_type} - return {"Content-Length": str(content_length), "Content-Type": content_type} - - def __iter__(self) -> typing.Iterator[bytes]: - for chunk in self.iter_chunks(): - yield chunk - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - for chunk in self.iter_chunks(): - yield chunk diff --git a/myenv/Lib/site-packages/httpx/_status_codes.py b/myenv/Lib/site-packages/httpx/_status_codes.py deleted file mode 100644 index 133a623..0000000 --- a/myenv/Lib/site-packages/httpx/_status_codes.py +++ /dev/null @@ -1,162 +0,0 @@ -from __future__ import annotations - -from enum import IntEnum - -__all__ = ["codes"] - - -class codes(IntEnum): - """HTTP status codes and reason phrases - - Status codes from the following RFCs are all observed: - - * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616 - * RFC 6585: Additional HTTP Status Codes - * RFC 3229: Delta encoding in HTTP - * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518 - * RFC 5842: Binding Extensions to WebDAV - * RFC 7238: Permanent Redirect - * RFC 2295: Transparent Content Negotiation in HTTP - * RFC 2774: An HTTP Extension Framework - * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2) - * RFC 2324: Hyper Text Coffee Pot Control Protocol (HTCPCP/1.0) - * RFC 7725: An HTTP Status Code to Report Legal Obstacles - * RFC 8297: An HTTP Status Code for Indicating Hints - * RFC 8470: Using Early Data in HTTP - """ - - def __new__(cls, value: int, phrase: str = "") -> codes: - obj = int.__new__(cls, value) - obj._value_ = value - - obj.phrase = phrase # type: ignore[attr-defined] - return obj - - def __str__(self) -> str: - return str(self.value) - - @classmethod - def get_reason_phrase(cls, value: int) -> str: - try: - return codes(value).phrase # type: ignore - except ValueError: - return "" - - @classmethod - def is_informational(cls, value: int) -> bool: - """ - Returns `True` for 1xx status codes, `False` otherwise. - """ - return 100 <= value <= 199 - - @classmethod - def is_success(cls, value: int) -> bool: - """ - Returns `True` for 2xx status codes, `False` otherwise. - """ - return 200 <= value <= 299 - - @classmethod - def is_redirect(cls, value: int) -> bool: - """ - Returns `True` for 3xx status codes, `False` otherwise. - """ - return 300 <= value <= 399 - - @classmethod - def is_client_error(cls, value: int) -> bool: - """ - Returns `True` for 4xx status codes, `False` otherwise. - """ - return 400 <= value <= 499 - - @classmethod - def is_server_error(cls, value: int) -> bool: - """ - Returns `True` for 5xx status codes, `False` otherwise. - """ - return 500 <= value <= 599 - - @classmethod - def is_error(cls, value: int) -> bool: - """ - Returns `True` for 4xx or 5xx status codes, `False` otherwise. 
- """ - return 400 <= value <= 599 - - # informational - CONTINUE = 100, "Continue" - SWITCHING_PROTOCOLS = 101, "Switching Protocols" - PROCESSING = 102, "Processing" - EARLY_HINTS = 103, "Early Hints" - - # success - OK = 200, "OK" - CREATED = 201, "Created" - ACCEPTED = 202, "Accepted" - NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information" - NO_CONTENT = 204, "No Content" - RESET_CONTENT = 205, "Reset Content" - PARTIAL_CONTENT = 206, "Partial Content" - MULTI_STATUS = 207, "Multi-Status" - ALREADY_REPORTED = 208, "Already Reported" - IM_USED = 226, "IM Used" - - # redirection - MULTIPLE_CHOICES = 300, "Multiple Choices" - MOVED_PERMANENTLY = 301, "Moved Permanently" - FOUND = 302, "Found" - SEE_OTHER = 303, "See Other" - NOT_MODIFIED = 304, "Not Modified" - USE_PROXY = 305, "Use Proxy" - TEMPORARY_REDIRECT = 307, "Temporary Redirect" - PERMANENT_REDIRECT = 308, "Permanent Redirect" - - # client error - BAD_REQUEST = 400, "Bad Request" - UNAUTHORIZED = 401, "Unauthorized" - PAYMENT_REQUIRED = 402, "Payment Required" - FORBIDDEN = 403, "Forbidden" - NOT_FOUND = 404, "Not Found" - METHOD_NOT_ALLOWED = 405, "Method Not Allowed" - NOT_ACCEPTABLE = 406, "Not Acceptable" - PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required" - REQUEST_TIMEOUT = 408, "Request Timeout" - CONFLICT = 409, "Conflict" - GONE = 410, "Gone" - LENGTH_REQUIRED = 411, "Length Required" - PRECONDITION_FAILED = 412, "Precondition Failed" - REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large" - REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long" - UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type" - REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable" - EXPECTATION_FAILED = 417, "Expectation Failed" - IM_A_TEAPOT = 418, "I'm a teapot" - MISDIRECTED_REQUEST = 421, "Misdirected Request" - UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity" - LOCKED = 423, "Locked" - FAILED_DEPENDENCY = 424, "Failed Dependency" - TOO_EARLY = 425, "Too Early" - UPGRADE_REQUIRED = 426, "Upgrade Required" - PRECONDITION_REQUIRED = 428, "Precondition Required" - TOO_MANY_REQUESTS = 429, "Too Many Requests" - REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large" - UNAVAILABLE_FOR_LEGAL_REASONS = 451, "Unavailable For Legal Reasons" - - # server errors - INTERNAL_SERVER_ERROR = 500, "Internal Server Error" - NOT_IMPLEMENTED = 501, "Not Implemented" - BAD_GATEWAY = 502, "Bad Gateway" - SERVICE_UNAVAILABLE = 503, "Service Unavailable" - GATEWAY_TIMEOUT = 504, "Gateway Timeout" - HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported" - VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates" - INSUFFICIENT_STORAGE = 507, "Insufficient Storage" - LOOP_DETECTED = 508, "Loop Detected" - NOT_EXTENDED = 510, "Not Extended" - NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required" - - -# Include lower-case styles for `requests` compatibility. 
-for code in codes: - setattr(codes, code._name_.lower(), int(code)) diff --git a/myenv/Lib/site-packages/httpx/_transports/__init__.py b/myenv/Lib/site-packages/httpx/_transports/__init__.py deleted file mode 100644 index 7a32105..0000000 --- a/myenv/Lib/site-packages/httpx/_transports/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .asgi import * -from .base import * -from .default import * -from .mock import * -from .wsgi import * - -__all__ = [ - "ASGITransport", - "AsyncBaseTransport", - "BaseTransport", - "AsyncHTTPTransport", - "HTTPTransport", - "MockTransport", - "WSGITransport", -] diff --git a/myenv/Lib/site-packages/httpx/_transports/asgi.py b/myenv/Lib/site-packages/httpx/_transports/asgi.py deleted file mode 100644 index 8578d4a..0000000 --- a/myenv/Lib/site-packages/httpx/_transports/asgi.py +++ /dev/null @@ -1,174 +0,0 @@ -from __future__ import annotations - -import typing - -import sniffio - -from .._models import Request, Response -from .._types import AsyncByteStream -from .base import AsyncBaseTransport - -if typing.TYPE_CHECKING: # pragma: no cover - import asyncio - - import trio - - Event = typing.Union[asyncio.Event, trio.Event] - - -_Message = typing.MutableMapping[str, typing.Any] -_Receive = typing.Callable[[], typing.Awaitable[_Message]] -_Send = typing.Callable[ - [typing.MutableMapping[str, typing.Any]], typing.Awaitable[None] -] -_ASGIApp = typing.Callable[ - [typing.MutableMapping[str, typing.Any], _Receive, _Send], typing.Awaitable[None] -] - -__all__ = ["ASGITransport"] - - -def create_event() -> Event: - if sniffio.current_async_library() == "trio": - import trio - - return trio.Event() - else: - import asyncio - - return asyncio.Event() - - -class ASGIResponseStream(AsyncByteStream): - def __init__(self, body: list[bytes]) -> None: - self._body = body - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - yield b"".join(self._body) - - -class ASGITransport(AsyncBaseTransport): - """ - A custom AsyncTransport that handles sending requests directly to an ASGI app. - - ```python - transport = httpx.ASGITransport( - app=app, - root_path="/submount", - client=("1.2.3.4", 123) - ) - client = httpx.AsyncClient(transport=transport) - ``` - - Arguments: - - * `app` - The ASGI application. - * `raise_app_exceptions` - Boolean indicating if exceptions in the application - should be raised. Default to `True`. Can be set to `False` for use cases - such as testing the content of a client 500 response. - * `root_path` - The root path on which the ASGI application should be mounted. - * `client` - A two-tuple indicating the client IP and port of incoming requests. - ``` - """ - - def __init__( - self, - app: _ASGIApp, - raise_app_exceptions: bool = True, - root_path: str = "", - client: tuple[str, int] = ("127.0.0.1", 123), - ) -> None: - self.app = app - self.raise_app_exceptions = raise_app_exceptions - self.root_path = root_path - self.client = client - - async def handle_async_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, AsyncByteStream) - - # ASGI scope. - scope = { - "type": "http", - "asgi": {"version": "3.0"}, - "http_version": "1.1", - "method": request.method, - "headers": [(k.lower(), v) for (k, v) in request.headers.raw], - "scheme": request.url.scheme, - "path": request.url.path, - "raw_path": request.url.raw_path.split(b"?")[0], - "query_string": request.url.query, - "server": (request.url.host, request.url.port), - "client": self.client, - "root_path": self.root_path, - } - - # Request. 
- request_body_chunks = request.stream.__aiter__() - request_complete = False - - # Response. - status_code = None - response_headers = None - body_parts = [] - response_started = False - response_complete = create_event() - - # ASGI callables. - - async def receive() -> dict[str, typing.Any]: - nonlocal request_complete - - if request_complete: - await response_complete.wait() - return {"type": "http.disconnect"} - - try: - body = await request_body_chunks.__anext__() - except StopAsyncIteration: - request_complete = True - return {"type": "http.request", "body": b"", "more_body": False} - return {"type": "http.request", "body": body, "more_body": True} - - async def send(message: typing.MutableMapping[str, typing.Any]) -> None: - nonlocal status_code, response_headers, response_started - - if message["type"] == "http.response.start": - assert not response_started - - status_code = message["status"] - response_headers = message.get("headers", []) - response_started = True - - elif message["type"] == "http.response.body": - assert not response_complete.is_set() - body = message.get("body", b"") - more_body = message.get("more_body", False) - - if body and request.method != "HEAD": - body_parts.append(body) - - if not more_body: - response_complete.set() - - try: - await self.app(scope, receive, send) - except Exception: # noqa: PIE-786 - if self.raise_app_exceptions: - raise - - response_complete.set() - if status_code is None: - status_code = 500 - if response_headers is None: - response_headers = {} - - assert response_complete.is_set() - assert status_code is not None - assert response_headers is not None - - stream = ASGIResponseStream(body_parts) - - return Response(status_code, headers=response_headers, stream=stream) diff --git a/myenv/Lib/site-packages/httpx/_transports/base.py b/myenv/Lib/site-packages/httpx/_transports/base.py deleted file mode 100644 index 66fd99d..0000000 --- a/myenv/Lib/site-packages/httpx/_transports/base.py +++ /dev/null @@ -1,86 +0,0 @@ -from __future__ import annotations - -import typing -from types import TracebackType - -from .._models import Request, Response - -T = typing.TypeVar("T", bound="BaseTransport") -A = typing.TypeVar("A", bound="AsyncBaseTransport") - -__all__ = ["AsyncBaseTransport", "BaseTransport"] - - -class BaseTransport: - def __enter__(self: T) -> T: - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - self.close() - - def handle_request(self, request: Request) -> Response: - """ - Send a single HTTP request and return a response. - - Developers shouldn't typically ever need to call into this API directly, - since the Client class provides all the higher level user-facing API - niceties. - - In order to properly release any network resources, the response - stream should *either* be consumed immediately, with a call to - `response.stream.read()`, or else the `handle_request` call should - be followed with a try/finally block to ensuring the stream is - always closed. - - Example usage: - - with httpx.HTTPTransport() as transport: - req = httpx.Request( - method=b"GET", - url=(b"https", b"www.example.com", 443, b"/"), - headers=[(b"Host", b"www.example.com")], - ) - resp = transport.handle_request(req) - body = resp.stream.read() - print(resp.status_code, resp.headers, body) - - - Takes a `Request` instance as the only argument. - - Returns a `Response` instance. 
- """ - raise NotImplementedError( - "The 'handle_request' method must be implemented." - ) # pragma: no cover - - def close(self) -> None: - pass - - -class AsyncBaseTransport: - async def __aenter__(self: A) -> A: - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - await self.aclose() - - async def handle_async_request( - self, - request: Request, - ) -> Response: - raise NotImplementedError( - "The 'handle_async_request' method must be implemented." - ) # pragma: no cover - - async def aclose(self) -> None: - pass diff --git a/myenv/Lib/site-packages/httpx/_transports/default.py b/myenv/Lib/site-packages/httpx/_transports/default.py deleted file mode 100644 index 33db416..0000000 --- a/myenv/Lib/site-packages/httpx/_transports/default.py +++ /dev/null @@ -1,389 +0,0 @@ -""" -Custom transports, with nicely configured defaults. - -The following additional keyword arguments are currently supported by httpcore... - -* uds: str -* local_address: str -* retries: int - -Example usages... - -# Disable HTTP/2 on a single specific domain. -mounts = { - "all://": httpx.HTTPTransport(http2=True), - "all://*example.org": httpx.HTTPTransport() -} - -# Using advanced httpcore configuration, with connection retries. -transport = httpx.HTTPTransport(retries=1) -client = httpx.Client(transport=transport) - -# Using advanced httpcore configuration, with unix domain sockets. -transport = httpx.HTTPTransport(uds="socket.uds") -client = httpx.Client(transport=transport) -""" - -from __future__ import annotations - -import contextlib -import typing -from types import TracebackType - -import httpcore - -from .._config import DEFAULT_LIMITS, Limits, Proxy, create_ssl_context -from .._exceptions import ( - ConnectError, - ConnectTimeout, - LocalProtocolError, - NetworkError, - PoolTimeout, - ProtocolError, - ProxyError, - ReadError, - ReadTimeout, - RemoteProtocolError, - TimeoutException, - UnsupportedProtocol, - WriteError, - WriteTimeout, -) -from .._models import Request, Response -from .._types import AsyncByteStream, CertTypes, ProxyTypes, SyncByteStream, VerifyTypes -from .._urls import URL -from .base import AsyncBaseTransport, BaseTransport - -T = typing.TypeVar("T", bound="HTTPTransport") -A = typing.TypeVar("A", bound="AsyncHTTPTransport") - -SOCKET_OPTION = typing.Union[ - typing.Tuple[int, int, int], - typing.Tuple[int, int, typing.Union[bytes, bytearray]], - typing.Tuple[int, int, None, int], -] - -__all__ = ["AsyncHTTPTransport", "HTTPTransport"] - - -@contextlib.contextmanager -def map_httpcore_exceptions() -> typing.Iterator[None]: - try: - yield - except Exception as exc: - mapped_exc = None - - for from_exc, to_exc in HTTPCORE_EXC_MAP.items(): - if not isinstance(exc, from_exc): - continue - # We want to map to the most specific exception we can find. - # Eg if `exc` is an `httpcore.ReadTimeout`, we want to map to - # `httpx.ReadTimeout`, not just `httpx.TimeoutException`. 
- if mapped_exc is None or issubclass(to_exc, mapped_exc): - mapped_exc = to_exc - - if mapped_exc is None: # pragma: no cover - raise - - message = str(exc) - raise mapped_exc(message) from exc - - -HTTPCORE_EXC_MAP = { - httpcore.TimeoutException: TimeoutException, - httpcore.ConnectTimeout: ConnectTimeout, - httpcore.ReadTimeout: ReadTimeout, - httpcore.WriteTimeout: WriteTimeout, - httpcore.PoolTimeout: PoolTimeout, - httpcore.NetworkError: NetworkError, - httpcore.ConnectError: ConnectError, - httpcore.ReadError: ReadError, - httpcore.WriteError: WriteError, - httpcore.ProxyError: ProxyError, - httpcore.UnsupportedProtocol: UnsupportedProtocol, - httpcore.ProtocolError: ProtocolError, - httpcore.LocalProtocolError: LocalProtocolError, - httpcore.RemoteProtocolError: RemoteProtocolError, -} - - -class ResponseStream(SyncByteStream): - def __init__(self, httpcore_stream: typing.Iterable[bytes]) -> None: - self._httpcore_stream = httpcore_stream - - def __iter__(self) -> typing.Iterator[bytes]: - with map_httpcore_exceptions(): - for part in self._httpcore_stream: - yield part - - def close(self) -> None: - if hasattr(self._httpcore_stream, "close"): - self._httpcore_stream.close() - - -class HTTPTransport(BaseTransport): - def __init__( - self, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - trust_env: bool = True, - proxy: ProxyTypes | None = None, - uds: str | None = None, - local_address: str | None = None, - retries: int = 0, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) - proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy - - if proxy is None: - self._pool = httpcore.ConnectionPool( - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - uds=uds, - local_address=local_address, - retries=retries, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("http", "https"): - self._pool = httpcore.HTTPProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - proxy_headers=proxy.headers.raw, - ssl_context=ssl_context, - proxy_ssl_context=proxy.ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - socket_options=socket_options, - ) - elif proxy.url.scheme == "socks5": - try: - import socksio # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using SOCKS proxy, but the 'socksio' package is not installed. " - "Make sure to install httpx using `pip install httpx[socks]`." 
- ) from None - - self._pool = httpcore.SOCKSProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - ) - else: # pragma: no cover - raise ValueError( - "Proxy protocol must be either 'http', 'https', or 'socks5'," - f" but got {proxy.url.scheme!r}." - ) - - def __enter__(self: T) -> T: # Use generics for subclass support. - self._pool.__enter__() - return self - - def __exit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - with map_httpcore_exceptions(): - self._pool.__exit__(exc_type, exc_value, traceback) - - def handle_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, SyncByteStream) - - req = httpcore.Request( - method=request.method, - url=httpcore.URL( - scheme=request.url.raw_scheme, - host=request.url.raw_host, - port=request.url.port, - target=request.url.raw_path, - ), - headers=request.headers.raw, - content=request.stream, - extensions=request.extensions, - ) - with map_httpcore_exceptions(): - resp = self._pool.handle_request(req) - - assert isinstance(resp.stream, typing.Iterable) - - return Response( - status_code=resp.status, - headers=resp.headers, - stream=ResponseStream(resp.stream), - extensions=resp.extensions, - ) - - def close(self) -> None: - self._pool.close() - - -class AsyncResponseStream(AsyncByteStream): - def __init__(self, httpcore_stream: typing.AsyncIterable[bytes]) -> None: - self._httpcore_stream = httpcore_stream - - async def __aiter__(self) -> typing.AsyncIterator[bytes]: - with map_httpcore_exceptions(): - async for part in self._httpcore_stream: - yield part - - async def aclose(self) -> None: - if hasattr(self._httpcore_stream, "aclose"): - await self._httpcore_stream.aclose() - - -class AsyncHTTPTransport(AsyncBaseTransport): - def __init__( - self, - verify: VerifyTypes = True, - cert: CertTypes | None = None, - http1: bool = True, - http2: bool = False, - limits: Limits = DEFAULT_LIMITS, - trust_env: bool = True, - proxy: ProxyTypes | None = None, - uds: str | None = None, - local_address: str | None = None, - retries: int = 0, - socket_options: typing.Iterable[SOCKET_OPTION] | None = None, - ) -> None: - ssl_context = create_ssl_context(verify=verify, cert=cert, trust_env=trust_env) - proxy = Proxy(url=proxy) if isinstance(proxy, (str, URL)) else proxy - - if proxy is None: - self._pool = httpcore.AsyncConnectionPool( - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - uds=uds, - local_address=local_address, - retries=retries, - socket_options=socket_options, - ) - elif proxy.url.scheme in ("http", "https"): - self._pool = httpcore.AsyncHTTPProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - proxy_headers=proxy.headers.raw, - proxy_ssl_context=proxy.ssl_context, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - 
keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - socket_options=socket_options, - ) - elif proxy.url.scheme == "socks5": - try: - import socksio # noqa - except ImportError: # pragma: no cover - raise ImportError( - "Using SOCKS proxy, but the 'socksio' package is not installed. " - "Make sure to install httpx using `pip install httpx[socks]`." - ) from None - - self._pool = httpcore.AsyncSOCKSProxy( - proxy_url=httpcore.URL( - scheme=proxy.url.raw_scheme, - host=proxy.url.raw_host, - port=proxy.url.port, - target=proxy.url.raw_path, - ), - proxy_auth=proxy.raw_auth, - ssl_context=ssl_context, - max_connections=limits.max_connections, - max_keepalive_connections=limits.max_keepalive_connections, - keepalive_expiry=limits.keepalive_expiry, - http1=http1, - http2=http2, - ) - else: # pragma: no cover - raise ValueError( - "Proxy protocol must be either 'http', 'https', or 'socks5'," - f" but got {proxy.url.scheme!r}." - ) - - async def __aenter__(self: A) -> A: # Use generics for subclass support. - await self._pool.__aenter__() - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None = None, - exc_value: BaseException | None = None, - traceback: TracebackType | None = None, - ) -> None: - with map_httpcore_exceptions(): - await self._pool.__aexit__(exc_type, exc_value, traceback) - - async def handle_async_request( - self, - request: Request, - ) -> Response: - assert isinstance(request.stream, AsyncByteStream) - - req = httpcore.Request( - method=request.method, - url=httpcore.URL( - scheme=request.url.raw_scheme, - host=request.url.raw_host, - port=request.url.port, - target=request.url.raw_path, - ), - headers=request.headers.raw, - content=request.stream, - extensions=request.extensions, - ) - with map_httpcore_exceptions(): - resp = await self._pool.handle_async_request(req) - - assert isinstance(resp.stream, typing.AsyncIterable) - - return Response( - status_code=resp.status, - headers=resp.headers, - stream=AsyncResponseStream(resp.stream), - extensions=resp.extensions, - ) - - async def aclose(self) -> None: - await self._pool.aclose() diff --git a/myenv/Lib/site-packages/httpx/_transports/mock.py b/myenv/Lib/site-packages/httpx/_transports/mock.py deleted file mode 100644 index 8c418f5..0000000 --- a/myenv/Lib/site-packages/httpx/_transports/mock.py +++ /dev/null @@ -1,43 +0,0 @@ -from __future__ import annotations - -import typing - -from .._models import Request, Response -from .base import AsyncBaseTransport, BaseTransport - -SyncHandler = typing.Callable[[Request], Response] -AsyncHandler = typing.Callable[[Request], typing.Coroutine[None, None, Response]] - - -__all__ = ["MockTransport"] - - -class MockTransport(AsyncBaseTransport, BaseTransport): - def __init__(self, handler: SyncHandler | AsyncHandler) -> None: - self.handler = handler - - def handle_request( - self, - request: Request, - ) -> Response: - request.read() - response = self.handler(request) - if not isinstance(response, Response): # pragma: no cover - raise TypeError("Cannot use an async handler in a sync Client") - return response - - async def handle_async_request( - self, - request: Request, - ) -> Response: - await request.aread() - response = self.handler(request) - - # Allow handler to *optionally* be an `async` function. - # If it is, then the `response` variable needs to be awaited to actually - # return the result.
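The upshot of this dual dispatch is that a single `MockTransport` serves both the sync and async clients. A small usage sketch with a synchronous handler (the endpoint and payload are invented for illustration):

```python
import httpx

def handler(request: httpx.Request) -> httpx.Response:
    # Echo the request path back as JSON, without any network I/O.
    return httpx.Response(200, json={"path": request.url.path})

client = httpx.Client(transport=httpx.MockTransport(handler))
response = client.get("https://example.org/ping")
assert response.status_code == 200
assert response.json() == {"path": "/ping"}
```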
- - if not isinstance(response, Response): - response = await response - - return response diff --git a/myenv/Lib/site-packages/httpx/_transports/wsgi.py b/myenv/Lib/site-packages/httpx/_transports/wsgi.py deleted file mode 100644 index 8592ffe..0000000 --- a/myenv/Lib/site-packages/httpx/_transports/wsgi.py +++ /dev/null @@ -1,149 +0,0 @@ -from __future__ import annotations - -import io -import itertools -import sys -import typing - -from .._models import Request, Response -from .._types import SyncByteStream -from .base import BaseTransport - -if typing.TYPE_CHECKING: - from _typeshed import OptExcInfo # pragma: no cover - from _typeshed.wsgi import WSGIApplication # pragma: no cover - -_T = typing.TypeVar("_T") - - -__all__ = ["WSGITransport"] - - -def _skip_leading_empty_chunks(body: typing.Iterable[_T]) -> typing.Iterable[_T]: - body = iter(body) - for chunk in body: - if chunk: - return itertools.chain([chunk], body) - return [] - - -class WSGIByteStream(SyncByteStream): - def __init__(self, result: typing.Iterable[bytes]) -> None: - self._close = getattr(result, "close", None) - self._result = _skip_leading_empty_chunks(result) - - def __iter__(self) -> typing.Iterator[bytes]: - for part in self._result: - yield part - - def close(self) -> None: - if self._close is not None: - self._close() - - -class WSGITransport(BaseTransport): - """ - A custom transport that handles sending requests directly to a WSGI app. - The simplest way to use this functionality is to use the `app` argument. - - ``` - client = httpx.Client(app=app) - ``` - - Alternatively, you can set up the transport instance explicitly. - This allows you to include any additional configuration arguments specific - to the WSGITransport class: - - ``` - transport = httpx.WSGITransport( - app=app, - script_name="/submount", - remote_addr="1.2.3.4" - ) - client = httpx.Client(transport=transport) - ``` - - Arguments: - - * `app` - The WSGI application. - * `raise_app_exceptions` - Boolean indicating if exceptions in the application - should be raised. Defaults to `True`. Can be set to `False` for use cases - such as testing the content of a client 500 response. - * `script_name` - The root path on which the WSGI application should be mounted. - * `remote_addr` - A string indicating the client IP of incoming requests.
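As a quick usage sketch before the class body continues: because the transport speaks plain WSGI, any WSGI callable can be exercised without a running server (the toy app below is invented for illustration):

```python
import httpx

def wsgi_app(environ, start_response):
    # A minimal WSGI application returning a fixed plain-text body.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello from WSGI"]

transport = httpx.WSGITransport(app=wsgi_app)
client = httpx.Client(transport=transport, base_url="https://testserver")
response = client.get("/")
assert response.status_code == 200
assert response.text == "hello from WSGI"
```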
- ``` - """ - - def __init__( - self, - app: WSGIApplication, - raise_app_exceptions: bool = True, - script_name: str = "", - remote_addr: str = "127.0.0.1", - wsgi_errors: typing.TextIO | None = None, - ) -> None: - self.app = app - self.raise_app_exceptions = raise_app_exceptions - self.script_name = script_name - self.remote_addr = remote_addr - self.wsgi_errors = wsgi_errors - - def handle_request(self, request: Request) -> Response: - request.read() - wsgi_input = io.BytesIO(request.content) - - port = request.url.port or {"http": 80, "https": 443}[request.url.scheme] - environ = { - "wsgi.version": (1, 0), - "wsgi.url_scheme": request.url.scheme, - "wsgi.input": wsgi_input, - "wsgi.errors": self.wsgi_errors or sys.stderr, - "wsgi.multithread": True, - "wsgi.multiprocess": False, - "wsgi.run_once": False, - "REQUEST_METHOD": request.method, - "SCRIPT_NAME": self.script_name, - "PATH_INFO": request.url.path, - "QUERY_STRING": request.url.query.decode("ascii"), - "SERVER_NAME": request.url.host, - "SERVER_PORT": str(port), - "SERVER_PROTOCOL": "HTTP/1.1", - "REMOTE_ADDR": self.remote_addr, - } - for header_key, header_value in request.headers.raw: - key = header_key.decode("ascii").upper().replace("-", "_") - if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"): - key = "HTTP_" + key - environ[key] = header_value.decode("ascii") - - seen_status = None - seen_response_headers = None - seen_exc_info = None - - def start_response( - status: str, - response_headers: list[tuple[str, str]], - exc_info: OptExcInfo | None = None, - ) -> typing.Callable[[bytes], typing.Any]: - nonlocal seen_status, seen_response_headers, seen_exc_info - seen_status = status - seen_response_headers = response_headers - seen_exc_info = exc_info - return lambda _: None - - result = self.app(environ, start_response) - - stream = WSGIByteStream(result) - - assert seen_status is not None - assert seen_response_headers is not None - if seen_exc_info and seen_exc_info[0] and self.raise_app_exceptions: - raise seen_exc_info[1] - - status_code = int(seen_status.split()[0]) - headers = [ - (key.encode("ascii"), value.encode("ascii")) - for key, value in seen_response_headers - ] - - return Response(status_code, headers=headers, stream=stream) diff --git a/myenv/Lib/site-packages/httpx/_types.py b/myenv/Lib/site-packages/httpx/_types.py deleted file mode 100644 index 661af26..0000000 --- a/myenv/Lib/site-packages/httpx/_types.py +++ /dev/null @@ -1,136 +0,0 @@ -""" -Type definitions for type checking purposes. 
-""" - -import ssl -from http.cookiejar import CookieJar -from typing import ( - IO, - TYPE_CHECKING, - Any, - AsyncIterable, - AsyncIterator, - Callable, - Dict, - Iterable, - Iterator, - List, - Mapping, - MutableMapping, - NamedTuple, - Optional, - Sequence, - Tuple, - Union, -) - -if TYPE_CHECKING: # pragma: no cover - from ._auth import Auth # noqa: F401 - from ._config import Proxy, Timeout # noqa: F401 - from ._models import Cookies, Headers, Request # noqa: F401 - from ._urls import URL, QueryParams # noqa: F401 - - -PrimitiveData = Optional[Union[str, int, float, bool]] - -RawURL = NamedTuple( - "RawURL", - [ - ("raw_scheme", bytes), - ("raw_host", bytes), - ("port", Optional[int]), - ("raw_path", bytes), - ], -) - -URLTypes = Union["URL", str] - -QueryParamTypes = Union[ - "QueryParams", - Mapping[str, Union[PrimitiveData, Sequence[PrimitiveData]]], - List[Tuple[str, PrimitiveData]], - Tuple[Tuple[str, PrimitiveData], ...], - str, - bytes, -] - -HeaderTypes = Union[ - "Headers", - Mapping[str, str], - Mapping[bytes, bytes], - Sequence[Tuple[str, str]], - Sequence[Tuple[bytes, bytes]], -] - -CookieTypes = Union["Cookies", CookieJar, Dict[str, str], List[Tuple[str, str]]] - -CertTypes = Union[ - # certfile - str, - # (certfile, keyfile) - Tuple[str, Optional[str]], - # (certfile, keyfile, password) - Tuple[str, Optional[str], Optional[str]], -] -VerifyTypes = Union[str, bool, ssl.SSLContext] -TimeoutTypes = Union[ - Optional[float], - Tuple[Optional[float], Optional[float], Optional[float], Optional[float]], - "Timeout", -] -ProxyTypes = Union["URL", str, "Proxy"] -ProxiesTypes = Union[ProxyTypes, Dict[Union["URL", str], Union[None, ProxyTypes]]] - -AuthTypes = Union[ - Tuple[Union[str, bytes], Union[str, bytes]], - Callable[["Request"], "Request"], - "Auth", -] - -RequestContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] -ResponseContent = Union[str, bytes, Iterable[bytes], AsyncIterable[bytes]] -ResponseExtensions = MutableMapping[str, Any] - -RequestData = Mapping[str, Any] - -FileContent = Union[IO[bytes], bytes, str] -FileTypes = Union[ - # file (or bytes) - FileContent, - # (filename, file (or bytes)) - Tuple[Optional[str], FileContent], - # (filename, file (or bytes), content_type) - Tuple[Optional[str], FileContent, Optional[str]], - # (filename, file (or bytes), content_type, headers) - Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], -] -RequestFiles = Union[Mapping[str, FileTypes], Sequence[Tuple[str, FileTypes]]] - -RequestExtensions = MutableMapping[str, Any] - -__all__ = ["AsyncByteStream", "SyncByteStream"] - - -class SyncByteStream: - def __iter__(self) -> Iterator[bytes]: - raise NotImplementedError( - "The '__iter__' method must be implemented." - ) # pragma: no cover - yield b"" # pragma: no cover - - def close(self) -> None: - """ - Subclasses can override this method to release any network resources - after a request/response cycle is complete. - """ - - -class AsyncByteStream: - async def __aiter__(self) -> AsyncIterator[bytes]: - raise NotImplementedError( - "The '__aiter__' method must be implemented." 
- ) # pragma: no cover - yield b"" # pragma: no cover - - async def aclose(self) -> None: - pass diff --git a/myenv/Lib/site-packages/httpx/_urlparse.py b/myenv/Lib/site-packages/httpx/_urlparse.py deleted file mode 100644 index 479c2ef..0000000 --- a/myenv/Lib/site-packages/httpx/_urlparse.py +++ /dev/null @@ -1,505 +0,0 @@ -""" -An implementation of `urlparse` that provides URL validation and normalization -as described by RFC3986. - -We rely on this implementation rather than the one in Python's stdlib, because: - -* It provides more complete URL validation. -* It properly differentiates between an empty querystring and an absent querystring, - to distinguish URLs with a trailing '?'. -* It handles scheme, hostname, port, and path normalization. -* It supports IDNA hostnames, normalizing them to their encoded form. -* The API supports passing individual components, as well as the complete URL string. - -Previously we relied on the excellent `rfc3986` package to handle URL parsing and -validation, but this module provides a simpler alternative, with less indirection -required. -""" - -from __future__ import annotations - -import ipaddress -import re -import typing - -import idna - -from ._exceptions import InvalidURL - -MAX_URL_LENGTH = 65536 - -# https://datatracker.ietf.org/doc/html/rfc3986.html#section-2.3 -UNRESERVED_CHARACTERS = ( - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~" -) -SUB_DELIMS = "!$&'()*+,;=" - -PERCENT_ENCODED_REGEX = re.compile("%[A-Fa-f0-9]{2}") - - -# {scheme}: (optional) -# //{authority} (optional) -# {path} -# ?{query} (optional) -# #{fragment} (optional) -URL_REGEX = re.compile( - ( - r"(?:(?P{scheme}):)?" - r"(?://(?P{authority}))?" - r"(?P{path})" - r"(?:\?(?P{query}))?" - r"(?:#(?P{fragment}))?" - ).format( - scheme="([a-zA-Z][a-zA-Z0-9+.-]*)?", - authority="[^/?#]*", - path="[^?#]*", - query="[^#]*", - fragment=".*", - ) -) - -# {userinfo}@ (optional) -# {host} -# :{port} (optional) -AUTHORITY_REGEX = re.compile( - ( - r"(?:(?P{userinfo})@)?" r"(?P{host})" r":?(?P{port})?" - ).format( - userinfo=".*", # Any character sequence. - host="(\\[.*\\]|[^:@]*)", # Either any character sequence excluding ':' or '@', - # or an IPv6 address enclosed within square brackets. - port=".*", # Any character sequence. - ) -) - - -# If we call urlparse with an individual component, then we need to regex -# validate that component individually. -# Note that we're duplicating the same strings as above. Shock! Horror!! -COMPONENT_REGEX = { - "scheme": re.compile("([a-zA-Z][a-zA-Z0-9+.-]*)?"), - "authority": re.compile("[^/?#]*"), - "path": re.compile("[^?#]*"), - "query": re.compile("[^#]*"), - "fragment": re.compile(".*"), - "userinfo": re.compile("[^@]*"), - "host": re.compile("(\\[.*\\]|[^:]*)"), - "port": re.compile(".*"), -} - - -# We use these simple regexs as a first pass before handing off to -# the stdlib 'ipaddress' module for IP address validation. 
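A standalone sketch of the two-stage host check described in the comment above: a cheap regex gate first, then strict stdlib validation (helper names are invented):

```python
import ipaddress
import re

IPV4_STYLE = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$")

def looks_like_ipv4(host: str) -> bool:
    # Cheap first-pass check, mirroring the regex gate.
    return bool(IPV4_STYLE.match(host))

def is_valid_ipv4(host: str) -> bool:
    # Strict validation, delegated to the stdlib.
    try:
        ipaddress.IPv4Address(host)
    except ipaddress.AddressValueError:
        return False
    return True

assert looks_like_ipv4("999.1.1.1") and not is_valid_ipv4("999.1.1.1")
assert looks_like_ipv4("127.0.0.1") and is_valid_ipv4("127.0.0.1")
assert not looks_like_ipv4("example.com")
```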
-IPv4_STYLE_HOSTNAME = re.compile(r"^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$") -IPv6_STYLE_HOSTNAME = re.compile(r"^\[.*\]$") - - -class ParseResult(typing.NamedTuple): - scheme: str - userinfo: str - host: str - port: int | None - path: str - query: str | None - fragment: str | None - - @property - def authority(self) -> str: - return "".join( - [ - f"{self.userinfo}@" if self.userinfo else "", - f"[{self.host}]" if ":" in self.host else self.host, - f":{self.port}" if self.port is not None else "", - ] - ) - - @property - def netloc(self) -> str: - return "".join( - [ - f"[{self.host}]" if ":" in self.host else self.host, - f":{self.port}" if self.port is not None else "", - ] - ) - - def copy_with(self, **kwargs: str | None) -> ParseResult: - if not kwargs: - return self - - defaults = { - "scheme": self.scheme, - "authority": self.authority, - "path": self.path, - "query": self.query, - "fragment": self.fragment, - } - defaults.update(kwargs) - return urlparse("", **defaults) - - def __str__(self) -> str: - authority = self.authority - return "".join( - [ - f"{self.scheme}:" if self.scheme else "", - f"//{authority}" if authority else "", - self.path, - f"?{self.query}" if self.query is not None else "", - f"#{self.fragment}" if self.fragment is not None else "", - ] - ) - - -def urlparse(url: str = "", **kwargs: str | None) -> ParseResult: - # Initial basic checks on allowable URLs. - # --------------------------------------- - - # Hard limit the maximum allowable URL length. - if len(url) > MAX_URL_LENGTH: - raise InvalidURL("URL too long") - - # If a URL includes any ASCII control characters including \t, \r, \n, - # then treat it as invalid. - if any(char.isascii() and not char.isprintable() for char in url): - char = next(char for char in url if char.isascii() and not char.isprintable()) - idx = url.find(char) - error = ( - f"Invalid non-printable ASCII character in URL, {char!r} at position {idx}." - ) - raise InvalidURL(error) - - # Some keyword arguments require special handling. - # ------------------------------------------------ - - # Coerce "port" to a string, if it is provided as an integer. - if "port" in kwargs: - port = kwargs["port"] - kwargs["port"] = str(port) if isinstance(port, int) else port - - # Replace "netloc" with "host and "port". - if "netloc" in kwargs: - netloc = kwargs.pop("netloc") or "" - kwargs["host"], _, kwargs["port"] = netloc.partition(":") - - # Replace "username" and/or "password" with "userinfo". - if "username" in kwargs or "password" in kwargs: - username = quote(kwargs.pop("username", "") or "") - password = quote(kwargs.pop("password", "") or "") - kwargs["userinfo"] = f"{username}:{password}" if password else username - - # Replace "raw_path" with "path" and "query". - if "raw_path" in kwargs: - raw_path = kwargs.pop("raw_path") or "" - kwargs["path"], seperator, kwargs["query"] = raw_path.partition("?") - if not seperator: - kwargs["query"] = None - - # Ensure that IPv6 "host" addresses are always escaped with "[...]". - if "host" in kwargs: - host = kwargs.get("host") or "" - if ":" in host and not (host.startswith("[") and host.endswith("]")): - kwargs["host"] = f"[{host}]" - - # If any keyword arguments are provided, ensure they are valid. 
- # ------------------------------------------------------------- - - for key, value in kwargs.items(): - if value is not None: - if len(value) > MAX_URL_LENGTH: - raise InvalidURL(f"URL component '{key}' too long") - - # If a component includes any ASCII control characters including \t, \r, \n, - # then treat it as invalid. - if any(char.isascii() and not char.isprintable() for char in value): - char = next( - char for char in value if char.isascii() and not char.isprintable() - ) - idx = value.find(char) - error = ( - f"Invalid non-printable ASCII character in URL {key} component, " - f"{char!r} at position {idx}." - ) - raise InvalidURL(error) - - # Ensure that keyword arguments match as a valid regex. - if not COMPONENT_REGEX[key].fullmatch(value): - raise InvalidURL(f"Invalid URL component '{key}'") - - # The URL_REGEX will always match, but may have empty components. - url_match = URL_REGEX.match(url) - assert url_match is not None - url_dict = url_match.groupdict() - - # * 'scheme', 'authority', and 'path' may be empty strings. - # * 'query' may be 'None', indicating no trailing "?" portion. - # Any string including the empty string, indicates a trailing "?". - # * 'fragment' may be 'None', indicating no trailing "#" portion. - # Any string including the empty string, indicates a trailing "#". - scheme = kwargs.get("scheme", url_dict["scheme"]) or "" - authority = kwargs.get("authority", url_dict["authority"]) or "" - path = kwargs.get("path", url_dict["path"]) or "" - query = kwargs.get("query", url_dict["query"]) - fragment = kwargs.get("fragment", url_dict["fragment"]) - - # The AUTHORITY_REGEX will always match, but may have empty components. - authority_match = AUTHORITY_REGEX.match(authority) - assert authority_match is not None - authority_dict = authority_match.groupdict() - - # * 'userinfo' and 'host' may be empty strings. - # * 'port' may be 'None'. - userinfo = kwargs.get("userinfo", authority_dict["userinfo"]) or "" - host = kwargs.get("host", authority_dict["host"]) or "" - port = kwargs.get("port", authority_dict["port"]) - - # Normalize and validate each component. - # We end up with a parsed representation of the URL, - # with components that are plain ASCII bytestrings. - parsed_scheme: str = scheme.lower() - parsed_userinfo: str = quote(userinfo, safe=SUB_DELIMS + ":") - parsed_host: str = encode_host(host) - parsed_port: int | None = normalize_port(port, scheme) - - has_scheme = parsed_scheme != "" - has_authority = ( - parsed_userinfo != "" or parsed_host != "" or parsed_port is not None - ) - validate_path(path, has_scheme=has_scheme, has_authority=has_authority) - if has_scheme or has_authority: - path = normalize_path(path) - - # The GEN_DELIMS set is... : / ? # [ ] @ - # These do not need to be percent-quoted unless they serve as delimiters for the - # specific component. - WHATWG_SAFE = '`{}%|^\\"' - - # For 'path' we need to drop ? and # from the GEN_DELIMS set. - parsed_path: str = quote(path, safe=SUB_DELIMS + WHATWG_SAFE + ":/[]@") - # For 'query' we need to drop '#' from the GEN_DELIMS set. - parsed_query: str | None = ( - None - if query is None - else quote(query, safe=SUB_DELIMS + WHATWG_SAFE + ":/?[]@") - ) - # For 'fragment' we can include all of the GEN_DELIMS set. - parsed_fragment: str | None = ( - None - if fragment is None - else quote(fragment, safe=SUB_DELIMS + WHATWG_SAFE + ":/?#[]@") - ) - - # The parsed ASCII bytestrings are our canonical form. - # All properties of the URL are derived from these. 
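Since every property of the `URL` class is derived from these canonical components, the combined effect of the normalization steps is easiest to see through the public API. A short sketch, assuming the rules implemented in this module (example URL invented):

```python
import httpx

url = httpx.URL("http://EXAMPLE.com:80/a/./b/../c?q=1")
assert url.host == "example.com"   # host lower-cased
assert url.port is None            # default port elided
assert url.path == "/a/c"          # "." and ".." segments removed
assert str(url) == "http://example.com/a/c?q=1"
```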
- return ParseResult( - parsed_scheme, - parsed_userinfo, - parsed_host, - parsed_port, - parsed_path, - parsed_query, - parsed_fragment, - ) - - -def encode_host(host: str) -> str: - if not host: - return "" - - elif IPv4_STYLE_HOSTNAME.match(host): - # Validate IPv4 hostnames like #.#.#.# - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet - try: - ipaddress.IPv4Address(host) - except ipaddress.AddressValueError: - raise InvalidURL(f"Invalid IPv4 address: {host!r}") - return host - - elif IPv6_STYLE_HOSTNAME.match(host): - # Validate IPv6 hostnames like [...] - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # "A host identified by an Internet Protocol literal address, version 6 - # [RFC3513] or later, is distinguished by enclosing the IP literal - # within square brackets ("[" and "]"). This is the only place where - # square bracket characters are allowed in the URI syntax." - try: - ipaddress.IPv6Address(host[1:-1]) - except ipaddress.AddressValueError: - raise InvalidURL(f"Invalid IPv6 address: {host!r}") - return host[1:-1] - - elif host.isascii(): - # Regular ASCII hostnames - # - # From https://datatracker.ietf.org/doc/html/rfc3986/#section-3.2.2 - # - # reg-name = *( unreserved / pct-encoded / sub-delims ) - WHATWG_SAFE = '"`{}%|\\' - return quote(host.lower(), safe=SUB_DELIMS + WHATWG_SAFE) - - # IDNA hostnames - try: - return idna.encode(host.lower()).decode("ascii") - except idna.IDNAError: - raise InvalidURL(f"Invalid IDNA hostname: {host!r}") - - -def normalize_port(port: str | int | None, scheme: str) -> int | None: - # From https://tools.ietf.org/html/rfc3986#section-3.2.3 - # - # "A scheme may define a default port. For example, the "http" scheme - # defines a default port of "80", corresponding to its reserved TCP - # port number. The type of port designated by the port number (e.g., - # TCP, UDP, SCTP) is defined by the URI scheme. URI producers and - # normalizers should omit the port component and its ":" delimiter if - # port is empty or if its value would be the same as that of the - # scheme's default." - if port is None or port == "": - return None - - try: - port_as_int = int(port) - except ValueError: - raise InvalidURL(f"Invalid port: {port!r}") - - # See https://url.spec.whatwg.org/#url-miscellaneous - default_port = {"ftp": 21, "http": 80, "https": 443, "ws": 80, "wss": 443}.get( - scheme - ) - if port_as_int == default_port: - return None - return port_as_int - - -def validate_path(path: str, has_scheme: bool, has_authority: bool) -> None: - """ - Path validation rules that depend on if the URL contains - a scheme or authority component. - - See https://datatracker.ietf.org/doc/html/rfc3986.html#section-3.3 - """ - if has_authority: - # If a URI contains an authority component, then the path component - # must either be empty or begin with a slash ("/") character." - if path and not path.startswith("/"): - raise InvalidURL("For absolute URLs, path must be empty or begin with '/'") - - if not has_scheme and not has_authority: - # If a URI does not contain an authority component, then the path cannot begin - # with two slash characters ("//"). - if path.startswith("//"): - raise InvalidURL("Relative URLs cannot have a path starting with '//'") - - # In addition, a URI reference (Section 4.1) may be a relative-path reference, - # in which case the first path segment cannot contain a colon (":") character. 
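These path rules surface as `InvalidURL` errors when components are supplied individually, for example via `copy_with`. A brief sketch, with invented URL values:

```python
import httpx

# Absolute URL: a non-empty path must begin with "/".
try:
    httpx.URL("http://example.com").copy_with(path="no-leading-slash")
except httpx.InvalidURL as exc:
    print(exc)  # For absolute URLs, path must be empty or begin with '/'

# Relative URL: the path must not begin with "//".
try:
    httpx.URL("/a").copy_with(path="//b")
except httpx.InvalidURL as exc:
    print(exc)  # Relative URLs cannot have a path starting with '//'
```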
- if path.startswith(":"): - raise InvalidURL("Relative URLs cannot have a path starting with ':'") - - -def normalize_path(path: str) -> str: - """ - Drop "." and ".." segments from a URL path. - - For example: - - normalize_path("/path/./to/somewhere/..") == "/path/to" - """ - # Fast return when no '.' characters in the path. - if "." not in path: - return path - - components = path.split("/") - - # Fast return when no '.' or '..' components in the path. - if "." not in components and ".." not in components: - return path - - # https://datatracker.ietf.org/doc/html/rfc3986#section-5.2.4 - output: list[str] = [] - for component in components: - if component == ".": - pass - elif component == "..": - if output and output != [""]: - output.pop() - else: - output.append(component) - return "/".join(output) - - -def PERCENT(string: str) -> str: - return "".join([f"%{byte:02X}" for byte in string.encode("utf-8")]) - - -def percent_encoded(string: str, safe: str = "/") -> str: - """ - Use percent-encoding to quote a string. - """ - NON_ESCAPED_CHARS = UNRESERVED_CHARACTERS + safe - - # Fast path for strings that don't need escaping. - if not string.rstrip(NON_ESCAPED_CHARS): - return string - - return "".join( - [char if char in NON_ESCAPED_CHARS else PERCENT(char) for char in string] - ) - - -def quote(string: str, safe: str = "/") -> str: - """ - Use percent-encoding to quote a string, omitting existing '%xx' escape sequences. - - See: https://www.rfc-editor.org/rfc/rfc3986#section-2.1 - - * `string`: The string to be percent-escaped. - * `safe`: A string containing characters that may be treated as safe, and do not - need to be escaped. Unreserved characters are always treated as safe. - See: https://www.rfc-editor.org/rfc/rfc3986#section-2.3 - """ - parts = [] - current_position = 0 - for match in re.finditer(PERCENT_ENCODED_REGEX, string): - start_position, end_position = match.start(), match.end() - matched_text = match.group(0) - # Add any text up to the '%xx' escape sequence. - if start_position != current_position: - leading_text = string[current_position:start_position] - parts.append(percent_encoded(leading_text, safe=safe)) - - # Add the '%xx' escape sequence. - parts.append(matched_text) - current_position = end_position - - # Add any text after the final '%xx' escape sequence. - if current_position != len(string): - trailing_text = string[current_position:] - parts.append(percent_encoded(trailing_text, safe=safe)) - - return "".join(parts) - - -def urlencode(items: list[tuple[str, str]]) -> str: - """ - We can use a much simpler version of the stdlib urlencode here because - we don't need to handle a bunch of different typing cases, such as bytes vs str. - - https://github.com/python/cpython/blob/b2f7b2ef0b5421e01efb8c7bee2ef95d3bab77eb/Lib/urllib/parse.py#L926 - - Note that we use '%20' encoding for spaces. and '%2F for '/'. - This is slightly different than `requests`, but is the behaviour that browsers use. 
- - See - - https://github.com/encode/httpx/issues/2536 - - https://github.com/encode/httpx/issues/2721 - - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode - """ - return "&".join( - [ - percent_encoded(k, safe="") + "=" + percent_encoded(v, safe="") - for k, v in items - ] - ) diff --git a/myenv/Lib/site-packages/httpx/_urls.py b/myenv/Lib/site-packages/httpx/_urls.py deleted file mode 100644 index ec4ea6b..0000000 --- a/myenv/Lib/site-packages/httpx/_urls.py +++ /dev/null @@ -1,648 +0,0 @@ -from __future__ import annotations - -import typing -from urllib.parse import parse_qs, unquote - -import idna - -from ._types import QueryParamTypes, RawURL -from ._urlparse import urlencode, urlparse -from ._utils import primitive_value_to_str - -__all__ = ["URL", "QueryParams"] - - -class URL: - """ - url = httpx.URL("HTTPS://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink") - - assert url.scheme == "https" - assert url.username == "jo@email.com" - assert url.password == "a secret" - assert url.userinfo == b"jo%40email.com:a%20secret" - assert url.host == "müller.de" - assert url.raw_host == b"xn--mller-kva.de" - assert url.port == 1234 - assert url.netloc == b"xn--mller-kva.de:1234" - assert url.path == "/pa th" - assert url.query == b"?search=ab" - assert url.raw_path == b"/pa%20th?search=ab" - assert url.fragment == "anchorlink" - - The components of a URL are broken down like this: - - https://jo%40email.com:a%20secret@müller.de:1234/pa%20th?search=ab#anchorlink - [scheme] [ username ] [password] [ host ][port][ path ] [ query ] [fragment] - [ userinfo ] [ netloc ][ raw_path ] - - Note that: - - * `url.scheme` is normalized to always be lowercased. - - * `url.host` is normalized to always be lowercased. Internationalized domain - names are represented in unicode, without IDNA encoding applied. For instance: - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - * `url.raw_host` is normalized to always be lowercased, and is IDNA encoded. - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - * `url.port` is either None or an integer. URLs that include the default port for - "http", "https", "ws", "wss", and "ftp" schemes have their port - normalized to `None`. - - assert httpx.URL("http://example.com") == httpx.URL("http://example.com:80") - assert httpx.URL("http://example.com").port is None - assert httpx.URL("http://example.com:80").port is None - - * `url.userinfo` is raw bytes, without URL escaping. Usually you'll want to work - with `url.username` and `url.password` instead, which handle the URL escaping. - - * `url.raw_path` is raw bytes of both the path and query, without URL escaping. - This portion is used as the target when constructing HTTP requests. Usually you'll - want to work with `url.path` instead. - - * `url.query` is raw bytes, without URL escaping. A URL query string portion can - only be properly URL escaped when decoding the parameter names and values - themselves. 
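One consequence of the raw-bytes query representation described above: percent-escaping is preserved in the canonical form, and decoding only happens once the query is parsed into `QueryParams`. A small sketch (example URL invented):

```python
import httpx

url = httpx.URL("https://example.com/path?q=some search")
assert url.query == b"q=some%20search"   # spaces %20-encoded in canonical form
assert url.params["q"] == "some search"  # decoding happens at parse time
```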
- """ - - def __init__(self, url: URL | str = "", **kwargs: typing.Any) -> None: - if kwargs: - allowed = { - "scheme": str, - "username": str, - "password": str, - "userinfo": bytes, - "host": str, - "port": int, - "netloc": bytes, - "path": str, - "query": bytes, - "raw_path": bytes, - "fragment": str, - "params": object, - } - - # Perform type checking for all supported keyword arguments. - for key, value in kwargs.items(): - if key not in allowed: - message = f"{key!r} is an invalid keyword argument for URL()" - raise TypeError(message) - if value is not None and not isinstance(value, allowed[key]): - expected = allowed[key].__name__ - seen = type(value).__name__ - message = f"Argument {key!r} must be {expected} but got {seen}" - raise TypeError(message) - if isinstance(value, bytes): - kwargs[key] = value.decode("ascii") - - if "params" in kwargs: - # Replace any "params" keyword with the raw "query" instead. - # - # Ensure that empty params use `kwargs["query"] = None` rather - # than `kwargs["query"] = ""`, so that generated URLs do not - # include an empty trailing "?". - params = kwargs.pop("params") - kwargs["query"] = None if not params else str(QueryParams(params)) - - if isinstance(url, str): - self._uri_reference = urlparse(url, **kwargs) - elif isinstance(url, URL): - self._uri_reference = url._uri_reference.copy_with(**kwargs) - else: - raise TypeError( - "Invalid type for url. Expected str or httpx.URL," - f" got {type(url)}: {url!r}" - ) - - @property - def scheme(self) -> str: - """ - The URL scheme, such as "http", "https". - Always normalised to lowercase. - """ - return self._uri_reference.scheme - - @property - def raw_scheme(self) -> bytes: - """ - The raw bytes representation of the URL scheme, such as b"http", b"https". - Always normalised to lowercase. - """ - return self._uri_reference.scheme.encode("ascii") - - @property - def userinfo(self) -> bytes: - """ - The URL userinfo as a raw bytestring. - For example: b"jo%40email.com:a%20secret". - """ - return self._uri_reference.userinfo.encode("ascii") - - @property - def username(self) -> str: - """ - The URL username as a string, with URL decoding applied. - For example: "jo@email.com" - """ - userinfo = self._uri_reference.userinfo - return unquote(userinfo.partition(":")[0]) - - @property - def password(self) -> str: - """ - The URL password as a string, with URL decoding applied. - For example: "a secret" - """ - userinfo = self._uri_reference.userinfo - return unquote(userinfo.partition(":")[2]) - - @property - def host(self) -> str: - """ - The URL host as a string. - Always normalized to lowercase, with IDNA hosts decoded into unicode. - - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.host == "www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.host == "中国.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.host == "::ffff:192.168.0.1" - """ - host: str = self._uri_reference.host - - if host.startswith("xn--"): - host = idna.decode(host) - - return host - - @property - def raw_host(self) -> bytes: - """ - The raw bytes representation of the URL host. - Always normalized to lowercase, and IDNA encoded. 
- - Examples: - - url = httpx.URL("http://www.EXAMPLE.org") - assert url.raw_host == b"www.example.org" - - url = httpx.URL("http://中国.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("http://xn--fiqs8s.icom.museum") - assert url.raw_host == b"xn--fiqs8s.icom.museum" - - url = httpx.URL("https://[::ffff:192.168.0.1]") - assert url.raw_host == b"::ffff:192.168.0.1" - """ - return self._uri_reference.host.encode("ascii") - - @property - def port(self) -> int | None: - """ - The URL port as an integer. - - Note that the URL class performs port normalization as per the WHATWG spec. - Default ports for "http", "https", "ws", "wss", and "ftp" schemes are always - treated as `None`. - - For example: - - assert httpx.URL("http://www.example.com") == httpx.URL("http://www.example.com:80") - assert httpx.URL("http://www.example.com:80").port is None - """ - return self._uri_reference.port - - @property - def netloc(self) -> bytes: - """ - Either `` or `:` as bytes. - Always normalized to lowercase, and IDNA encoded. - - This property may be used for generating the value of a request - "Host" header. - """ - return self._uri_reference.netloc.encode("ascii") - - @property - def path(self) -> str: - """ - The URL path as a string. Excluding the query string, and URL decoded. - - For example: - - url = httpx.URL("https://example.com/pa%20th") - assert url.path == "/pa th" - """ - path = self._uri_reference.path or "/" - return unquote(path) - - @property - def query(self) -> bytes: - """ - The URL query string, as raw bytes, excluding the leading b"?". - - This is necessarily a bytewise interface, because we cannot - perform URL decoding of this representation until we've parsed - the keys and values into a QueryParams instance. - - For example: - - url = httpx.URL("https://example.com/?filter=some%20search%20terms") - assert url.query == b"filter=some%20search%20terms" - """ - query = self._uri_reference.query or "" - return query.encode("ascii") - - @property - def params(self) -> QueryParams: - """ - The URL query parameters, neatly parsed and packaged into an immutable - multidict representation. - """ - return QueryParams(self._uri_reference.query) - - @property - def raw_path(self) -> bytes: - """ - The complete URL path and query string as raw bytes. - Used as the target when constructing HTTP requests. - - For example: - - GET /users?search=some%20text HTTP/1.1 - Host: www.example.org - Connection: close - """ - path = self._uri_reference.path or "/" - if self._uri_reference.query is not None: - path += "?" + self._uri_reference.query - return path.encode("ascii") - - @property - def fragment(self) -> str: - """ - The URL fragments, as used in HTML anchors. - As a string, without the leading '#'. - """ - return unquote(self._uri_reference.fragment or "") - - @property - def raw(self) -> RawURL: - """ - Provides the (scheme, host, port, target) for the outgoing request. - - In older versions of `httpx` this was used in the low-level transport API. - We no longer use `RawURL`, and this property will be deprecated - in a future release. - """ - return RawURL( - self.raw_scheme, - self.raw_host, - self.port, - self.raw_path, - ) - - @property - def is_absolute_url(self) -> bool: - """ - Return `True` for absolute URLs such as 'http://example.com/path', - and `False` for relative URLs such as '/path'. - """ - # We don't use `.is_absolute` from `rfc3986` because it treats - # URLs with a fragment portion as not absolute. 
- # What we actually care about is if the URL provides - # a scheme and hostname to which connections should be made. - return bool(self._uri_reference.scheme and self._uri_reference.host) - - @property - def is_relative_url(self) -> bool: - """ - Return `False` for absolute URLs such as 'http://example.com/path', - and `True` for relative URLs such as '/path'. - """ - return not self.is_absolute_url - - def copy_with(self, **kwargs: typing.Any) -> URL: - """ - Copy this URL, returning a new URL with some components altered. - Accepts the same set of parameters as the components that are made - available via properties on the `URL` class. - - For example: - - url = httpx.URL("https://www.example.com").copy_with( - username="jo@gmail.com", password="a secret" - ) - assert url == "https://jo%40email.com:a%20secret@www.example.com" - """ - return URL(self, **kwargs) - - def copy_set_param(self, key: str, value: typing.Any = None) -> URL: - return self.copy_with(params=self.params.set(key, value)) - - def copy_add_param(self, key: str, value: typing.Any = None) -> URL: - return self.copy_with(params=self.params.add(key, value)) - - def copy_remove_param(self, key: str) -> URL: - return self.copy_with(params=self.params.remove(key)) - - def copy_merge_params(self, params: QueryParamTypes) -> URL: - return self.copy_with(params=self.params.merge(params)) - - def join(self, url: URL | str) -> URL: - """ - Return an absolute URL, using this URL as the base. - - Eg. - - url = httpx.URL("https://www.example.com/test") - url = url.join("/new/path") - assert url == "https://www.example.com/new/path" - """ - from urllib.parse import urljoin - - return URL(urljoin(str(self), str(URL(url)))) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - return isinstance(other, (URL, str)) and str(self) == str(URL(other)) - - def __str__(self) -> str: - return str(self._uri_reference) - - def __repr__(self) -> str: - scheme, userinfo, host, port, path, query, fragment = self._uri_reference - - if ":" in userinfo: - # Mask any password component. - userinfo = f'{userinfo.split(":")[0]}:[secure]' - - authority = "".join( - [ - f"{userinfo}@" if userinfo else "", - f"[{host}]" if ":" in host else host, - f":{port}" if port is not None else "", - ] - ) - url = "".join( - [ - f"{self.scheme}:" if scheme else "", - f"//{authority}" if authority else "", - path, - f"?{query}" if query is not None else "", - f"#{fragment}" if fragment is not None else "", - ] - ) - - return f"{self.__class__.__name__}({url!r})" - - -class QueryParams(typing.Mapping[str, str]): - """ - URL query parameters, as a multi-dict. - """ - - def __init__(self, *args: QueryParamTypes | None, **kwargs: typing.Any) -> None: - assert len(args) < 2, "Too many arguments." - assert not (args and kwargs), "Cannot mix named and unnamed arguments." 
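Before the normalisation logic that follows, it may help to see the input forms this constructor accepts; under the rules above, all of the following are equivalent (values invented for illustration):

```python
import httpx

# String, list-of-pairs, and dict inputs all normalise to the same multi-dict.
a = httpx.QueryParams("a=123&a=456&b=789")
b = httpx.QueryParams([("a", "123"), ("a", "456"), ("b", "789")])
c = httpx.QueryParams({"a": ["123", "456"], "b": "789"})
assert a == b == c
assert a.get_list("a") == ["123", "456"]
```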
- - value = args[0] if args else kwargs - - if value is None or isinstance(value, (str, bytes)): - value = value.decode("ascii") if isinstance(value, bytes) else value - self._dict = parse_qs(value, keep_blank_values=True) - elif isinstance(value, QueryParams): - self._dict = {k: list(v) for k, v in value._dict.items()} - else: - dict_value: dict[typing.Any, list[typing.Any]] = {} - if isinstance(value, (list, tuple)): - # Convert list inputs like: - # [("a", "123"), ("a", "456"), ("b", "789")] - # To a dict representation, like: - # {"a": ["123", "456"], "b": ["789"]} - for item in value: - dict_value.setdefault(item[0], []).append(item[1]) - else: - # Convert dict inputs like: - # {"a": "123", "b": ["456", "789"]} - # To dict inputs where values are always lists, like: - # {"a": ["123"], "b": ["456", "789"]} - dict_value = { - k: list(v) if isinstance(v, (list, tuple)) else [v] - for k, v in value.items() - } - - # Ensure that keys and values are neatly coerced to strings. - # We coerce values `True` and `False` to JSON-like "true" and "false" - # representations, and coerce `None` values to the empty string. - self._dict = { - str(k): [primitive_value_to_str(item) for item in v] - for k, v in dict_value.items() - } - - def keys(self) -> typing.KeysView[str]: - """ - Return all the keys in the query params. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.keys()) == ["a", "b"] - """ - return self._dict.keys() - - def values(self) -> typing.ValuesView[str]: - """ - Return all the values in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.values()) == ["123", "789"] - """ - return {k: v[0] for k, v in self._dict.items()}.values() - - def items(self) -> typing.ItemsView[str, str]: - """ - Return all items in the query params. If a key occurs more than once - only the first item for that key is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.items()) == [("a", "123"), ("b", "789")] - """ - return {k: v[0] for k, v in self._dict.items()}.items() - - def multi_items(self) -> list[tuple[str, str]]: - """ - Return all items in the query params. Allow duplicate keys to occur. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert list(q.multi_items()) == [("a", "123"), ("a", "456"), ("b", "789")] - """ - multi_items: list[tuple[str, str]] = [] - for k, v in self._dict.items(): - multi_items.extend([(k, i) for i in v]) - return multi_items - - def get(self, key: typing.Any, default: typing.Any = None) -> typing.Any: - """ - Get a value from the query param for a given key. If the key occurs - more than once, then only the first value is returned. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get("a") == "123" - """ - if key in self._dict: - return self._dict[str(key)][0] - return default - - def get_list(self, key: str) -> list[str]: - """ - Get all values from the query param for a given key. - - Usage: - - q = httpx.QueryParams("a=123&a=456&b=789") - assert q.get_list("a") == ["123", "456"] - """ - return list(self._dict.get(str(key), [])) - - def set(self, key: str, value: typing.Any = None) -> QueryParams: - """ - Return a new QueryParams instance, setting the value of a key. 
- - Usage: - - q = httpx.QueryParams("a=123") - q = q.set("a", "456") - assert q == httpx.QueryParams("a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = [primitive_value_to_str(value)] - return q - - def add(self, key: str, value: typing.Any = None) -> QueryParams: - """ - Return a new QueryParams instance, setting or appending the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.add("a", "456") - assert q == httpx.QueryParams("a=123&a=456") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict[str(key)] = q.get_list(key) + [primitive_value_to_str(value)] - return q - - def remove(self, key: str) -> QueryParams: - """ - Return a new QueryParams instance, removing the value of a key. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.remove("a") - assert q == httpx.QueryParams("") - """ - q = QueryParams() - q._dict = dict(self._dict) - q._dict.pop(str(key), None) - return q - - def merge(self, params: QueryParamTypes | None = None) -> QueryParams: - """ - Return a new QueryParams instance, updated with. - - Usage: - - q = httpx.QueryParams("a=123") - q = q.merge({"b": "456"}) - assert q == httpx.QueryParams("a=123&b=456") - - q = httpx.QueryParams("a=123") - q = q.merge({"a": "456", "b": "789"}) - assert q == httpx.QueryParams("a=456&b=789") - """ - q = QueryParams(params) - q._dict = {**self._dict, **q._dict} - return q - - def __getitem__(self, key: typing.Any) -> str: - return self._dict[key][0] - - def __contains__(self, key: typing.Any) -> bool: - return key in self._dict - - def __iter__(self) -> typing.Iterator[typing.Any]: - return iter(self.keys()) - - def __len__(self) -> int: - return len(self._dict) - - def __bool__(self) -> bool: - return bool(self._dict) - - def __hash__(self) -> int: - return hash(str(self)) - - def __eq__(self, other: typing.Any) -> bool: - if not isinstance(other, self.__class__): - return False - return sorted(self.multi_items()) == sorted(other.multi_items()) - - def __str__(self) -> str: - """ - Note that we use '%20' encoding for spaces, and treat '/' as a safe - character. - - See https://github.com/encode/httpx/issues/2536 and - https://docs.python.org/3/library/urllib.parse.html#urllib.parse.urlencode - """ - return urlencode(self.multi_items()) - - def __repr__(self) -> str: - class_name = self.__class__.__name__ - query_string = str(self) - return f"{class_name}({query_string!r})" - - def update(self, params: QueryParamTypes | None = None) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.merge(...)` to create an updated copy." - ) - - def __setitem__(self, key: str, value: str) -> None: - raise RuntimeError( - "QueryParams are immutable since 0.18.0. " - "Use `q = q.set(key, value)` to create an updated copy." 
- ) diff --git a/myenv/Lib/site-packages/httpx/_utils.py b/myenv/Lib/site-packages/httpx/_utils.py deleted file mode 100644 index a9ece19..0000000 --- a/myenv/Lib/site-packages/httpx/_utils.py +++ /dev/null @@ -1,440 +0,0 @@ -from __future__ import annotations - -import codecs -import email.message -import ipaddress -import mimetypes -import os -import re -import time -import typing -from pathlib import Path -from urllib.request import getproxies - -import sniffio - -from ._types import PrimitiveData - -if typing.TYPE_CHECKING: # pragma: no cover - from ._urls import URL - - -_HTML5_FORM_ENCODING_REPLACEMENTS = {'"': "%22", "\\": "\\\\"} -_HTML5_FORM_ENCODING_REPLACEMENTS.update( - {chr(c): "%{:02X}".format(c) for c in range(0x1F + 1) if c != 0x1B} -) -_HTML5_FORM_ENCODING_RE = re.compile( - r"|".join([re.escape(c) for c in _HTML5_FORM_ENCODING_REPLACEMENTS.keys()]) -) - - -def normalize_header_key( - value: str | bytes, - lower: bool, - encoding: str | None = None, -) -> bytes: - """ - Coerce str/bytes into a strictly byte-wise HTTP header key. - """ - if isinstance(value, bytes): - bytes_value = value - else: - bytes_value = value.encode(encoding or "ascii") - - return bytes_value.lower() if lower else bytes_value - - -def normalize_header_value(value: str | bytes, encoding: str | None = None) -> bytes: - """ - Coerce str/bytes into a strictly byte-wise HTTP header value. - """ - if isinstance(value, bytes): - return value - return value.encode(encoding or "ascii") - - -def primitive_value_to_str(value: PrimitiveData) -> str: - """ - Coerce a primitive data type into a string value. - - Note that we prefer JSON-style 'true'/'false' for boolean values here. - """ - if value is True: - return "true" - elif value is False: - return "false" - elif value is None: - return "" - return str(value) - - -def is_known_encoding(encoding: str) -> bool: - """ - Return `True` if `encoding` is a known codec. - """ - try: - codecs.lookup(encoding) - except LookupError: - return False - return True - - -def format_form_param(name: str, value: str) -> bytes: - """ - Encode a name/value pair within a multipart form. 
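- Illustrative example (an editor's sketch, not in the upstream docstring):
- format_form_param("name", 'say "hi"') would yield b'name="say %22hi%22"'.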
- """ - - def replacer(match: typing.Match[str]) -> str: - return _HTML5_FORM_ENCODING_REPLACEMENTS[match.group(0)] - - value = _HTML5_FORM_ENCODING_RE.sub(replacer, value) - return f'{name}="{value}"'.encode() - - -def get_ca_bundle_from_env() -> str | None: - if "SSL_CERT_FILE" in os.environ: - ssl_file = Path(os.environ["SSL_CERT_FILE"]) - if ssl_file.is_file(): - return str(ssl_file) - if "SSL_CERT_DIR" in os.environ: - ssl_path = Path(os.environ["SSL_CERT_DIR"]) - if ssl_path.is_dir(): - return str(ssl_path) - return None - - -def parse_header_links(value: str) -> list[dict[str, str]]: - """ - Returns a list of parsed link headers, for more info see: - https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Link - The generic syntax of those is: - Link: < uri-reference >; param1=value1; param2="value2" - So for instance: - Link; '; type="image/jpeg",;' - would return - [ - {"url": "http:/.../front.jpeg", "type": "image/jpeg"}, - {"url": "http://.../back.jpeg"}, - ] - :param value: HTTP Link entity-header field - :return: list of parsed link headers - """ - links: list[dict[str, str]] = [] - replace_chars = " '\"" - value = value.strip(replace_chars) - if not value: - return links - for val in re.split(", *<", value): - try: - url, params = val.split(";", 1) - except ValueError: - url, params = val, "" - link = {"url": url.strip("<> '\"")} - for param in params.split(";"): - try: - key, value = param.split("=") - except ValueError: - break - link[key.strip(replace_chars)] = value.strip(replace_chars) - links.append(link) - return links - - -def parse_content_type_charset(content_type: str) -> str | None: - # We used to use `cgi.parse_header()` here, but `cgi` became a dead battery. - # See: https://peps.python.org/pep-0594/#cgi - msg = email.message.Message() - msg["content-type"] = content_type - return msg.get_content_charset(failobj=None) - - -SENSITIVE_HEADERS = {"authorization", "proxy-authorization"} - - -def obfuscate_sensitive_headers( - items: typing.Iterable[tuple[typing.AnyStr, typing.AnyStr]], -) -> typing.Iterator[tuple[typing.AnyStr, typing.AnyStr]]: - for k, v in items: - if to_str(k.lower()) in SENSITIVE_HEADERS: - v = to_bytes_or_str("[secure]", match_type_of=v) - yield k, v - - -def port_or_default(url: URL) -> int | None: - if url.port is not None: - return url.port - return {"http": 80, "https": 443}.get(url.scheme) - - -def same_origin(url: URL, other: URL) -> bool: - """ - Return 'True' if the given URLs share the same origin. - """ - return ( - url.scheme == other.scheme - and url.host == other.host - and port_or_default(url) == port_or_default(other) - ) - - -def is_https_redirect(url: URL, location: URL) -> bool: - """ - Return 'True' if 'location' is a HTTPS upgrade of 'url' - """ - if url.host != location.host: - return False - - return ( - url.scheme == "http" - and port_or_default(url) == 80 - and location.scheme == "https" - and port_or_default(location) == 443 - ) - - -def get_environment_proxies() -> dict[str, str | None]: - """Gets proxy information from the environment""" - - # urllib.request.getproxies() falls back on System - # Registry and Config for proxies on Windows and macOS. - # We don't want to propagate non-HTTP proxies into - # our configuration such as 'TRAVIS_APT_PROXY'. 
- proxy_info = getproxies() - mounts: dict[str, str | None] = {} - - for scheme in ("http", "https", "all"): - if proxy_info.get(scheme): - hostname = proxy_info[scheme] - mounts[f"{scheme}://"] = ( - hostname if "://" in hostname else f"http://{hostname}" - ) - - no_proxy_hosts = [host.strip() for host in proxy_info.get("no", "").split(",")] - for hostname in no_proxy_hosts: - # See https://curl.haxx.se/libcurl/c/CURLOPT_NOPROXY.html for details - # on how names in `NO_PROXY` are handled. - if hostname == "*": - # If NO_PROXY=* is used or if "*" occurs as any one of the comma - # separated hostnames, then we should just bypass any information - # from HTTP_PROXY, HTTPS_PROXY, ALL_PROXY, and always ignore - # proxies. - return {} - elif hostname: - # NO_PROXY=.google.com is marked as "all://*.google.com, - # which disables "www.google.com" but not "google.com" - # NO_PROXY=google.com is marked as "all://*google.com, - # which disables "www.google.com" and "google.com". - # (But not "wwwgoogle.com") - # NO_PROXY can include domains, IPv6, IPv4 addresses and "localhost" - # NO_PROXY=example.com,::1,localhost,192.168.0.0/16 - if "://" in hostname: - mounts[hostname] = None - elif is_ipv4_hostname(hostname): - mounts[f"all://{hostname}"] = None - elif is_ipv6_hostname(hostname): - mounts[f"all://[{hostname}]"] = None - elif hostname.lower() == "localhost": - mounts[f"all://{hostname}"] = None - else: - mounts[f"all://*{hostname}"] = None - - return mounts - - -def to_bytes(value: str | bytes, encoding: str = "utf-8") -> bytes: - return value.encode(encoding) if isinstance(value, str) else value - - -def to_str(value: str | bytes, encoding: str = "utf-8") -> str: - return value if isinstance(value, str) else value.decode(encoding) - - -def to_bytes_or_str(value: str, match_type_of: typing.AnyStr) -> typing.AnyStr: - return value if isinstance(match_type_of, str) else value.encode() - - -def unquote(value: str) -> str: - return value[1:-1] if value[0] == value[-1] == '"' else value - - -def guess_content_type(filename: str | None) -> str | None: - if filename: - return mimetypes.guess_type(filename)[0] or "application/octet-stream" - return None - - -def peek_filelike_length(stream: typing.Any) -> int | None: - """ - Given a file-like stream object, return its length in number of bytes - without reading it into memory. - """ - try: - # Is it an actual file? - fd = stream.fileno() - # Yup, seems to be an actual file. - length = os.fstat(fd).st_size - except (AttributeError, OSError): - # No... Maybe it's something that supports random access, like `io.BytesIO`? - try: - # Assuming so, go to end of stream to figure out its length, - # then put it back in place. - offset = stream.tell() - length = stream.seek(0, os.SEEK_END) - stream.seek(offset) - except (AttributeError, OSError): - # Not even that? Sorry, we're doomed... 
- return None - - return length - - -class Timer: - async def _get_time(self) -> float: - library = sniffio.current_async_library() - if library == "trio": - import trio - - return trio.current_time() - else: - import asyncio - - return asyncio.get_event_loop().time() - - def sync_start(self) -> None: - self.started = time.perf_counter() - - async def async_start(self) -> None: - self.started = await self._get_time() - - def sync_elapsed(self) -> float: - now = time.perf_counter() - return now - self.started - - async def async_elapsed(self) -> float: - now = await self._get_time() - return now - self.started - - -class URLPattern: - """ - A utility class currently used for making lookups against proxy keys... - - # Wildcard matching... - >>> pattern = URLPattern("all://") - >>> pattern.matches(httpx.URL("http://example.com")) - True - - # Witch scheme matching... - >>> pattern = URLPattern("https://") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - False - - # With domain matching... - >>> pattern = URLPattern("https://example.com") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - False - >>> pattern.matches(httpx.URL("https://other.com")) - False - - # Wildcard scheme, with domain matching... - >>> pattern = URLPattern("all://example.com") - >>> pattern.matches(httpx.URL("https://example.com")) - True - >>> pattern.matches(httpx.URL("http://example.com")) - True - >>> pattern.matches(httpx.URL("https://other.com")) - False - - # With port matching... - >>> pattern = URLPattern("https://example.com:1234") - >>> pattern.matches(httpx.URL("https://example.com:1234")) - True - >>> pattern.matches(httpx.URL("https://example.com")) - False - """ - - def __init__(self, pattern: str) -> None: - from ._urls import URL - - if pattern and ":" not in pattern: - raise ValueError( - f"Proxy keys should use proper URL forms rather " - f"than plain scheme strings. " - f'Instead of "{pattern}", use "{pattern}://"' - ) - - url = URL(pattern) - self.pattern = pattern - self.scheme = "" if url.scheme == "all" else url.scheme - self.host = "" if url.host == "*" else url.host - self.port = url.port - if not url.host or url.host == "*": - self.host_regex: typing.Pattern[str] | None = None - elif url.host.startswith("*."): - # *.example.com should match "www.example.com", but not "example.com" - domain = re.escape(url.host[2:]) - self.host_regex = re.compile(f"^.+\\.{domain}$") - elif url.host.startswith("*"): - # *example.com should match "www.example.com" and "example.com" - domain = re.escape(url.host[1:]) - self.host_regex = re.compile(f"^(.+\\.)?{domain}$") - else: - # example.com should match "example.com" but not "www.example.com" - domain = re.escape(url.host) - self.host_regex = re.compile(f"^{domain}$") - - def matches(self, other: URL) -> bool: - if self.scheme and self.scheme != other.scheme: - return False - if ( - self.host - and self.host_regex is not None - and not self.host_regex.match(other.host) - ): - return False - if self.port is not None and self.port != other.port: - return False - return True - - @property - def priority(self) -> tuple[int, int, int]: - """ - The priority allows URLPattern instances to be sortable, so that - we can match from most specific to least specific. - """ - # URLs with a port should take priority over URLs without a port. - port_priority = 0 if self.port is not None else 1 - # Longer hostnames should match first. 
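- # (Illustrative comment, not part of the upstream file.) For example,
- # URLPattern("https://example.com:1234").priority == (0, -11, -5), while
- # URLPattern("all://").priority == (1, 0, 0), so the explicit pattern
- # sorts first and is matched first.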
- host_priority = -len(self.host) - # Longer schemes should match first. - scheme_priority = -len(self.scheme) - return (port_priority, host_priority, scheme_priority) - - def __hash__(self) -> int: - return hash(self.pattern) - - def __lt__(self, other: URLPattern) -> bool: - return self.priority < other.priority - - def __eq__(self, other: typing.Any) -> bool: - return isinstance(other, URLPattern) and self.pattern == other.pattern - - -def is_ipv4_hostname(hostname: str) -> bool: - try: - ipaddress.IPv4Address(hostname.split("/")[0]) - except Exception: - return False - return True - - -def is_ipv6_hostname(hostname: str) -> bool: - try: - ipaddress.IPv6Address(hostname.split("/")[0]) - except Exception: - return False - return True diff --git a/myenv/Lib/site-packages/httpx/py.typed b/myenv/Lib/site-packages/httpx/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/idna-3.10.dist-info/INSTALLER b/myenv/Lib/site-packages/idna-3.10.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/idna-3.10.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/idna-3.10.dist-info/LICENSE.md b/myenv/Lib/site-packages/idna-3.10.dist-info/LICENSE.md deleted file mode 100644 index 19b6b45..0000000 --- a/myenv/Lib/site-packages/idna-3.10.dist-info/LICENSE.md +++ /dev/null @@ -1,31 +0,0 @@ -BSD 3-Clause License - -Copyright (c) 2013-2024, Kim Davies and contributors. -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/myenv/Lib/site-packages/idna-3.10.dist-info/METADATA b/myenv/Lib/site-packages/idna-3.10.dist-info/METADATA deleted file mode 100644 index c42623e..0000000 --- a/myenv/Lib/site-packages/idna-3.10.dist-info/METADATA +++ /dev/null @@ -1,250 +0,0 @@ -Metadata-Version: 2.1 -Name: idna -Version: 3.10 -Summary: Internationalized Domain Names in Applications (IDNA) -Author-email: Kim Davies -Requires-Python: >=3.6 -Description-Content-Type: text/x-rst -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Intended Audience :: System Administrators -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3 :: Only -Classifier: Programming Language :: Python :: 3.6 -Classifier: Programming Language :: Python :: 3.7 -Classifier: Programming Language :: Python :: 3.8 -Classifier: Programming Language :: Python :: 3.9 -Classifier: Programming Language :: Python :: 3.10 -Classifier: Programming Language :: Python :: 3.11 -Classifier: Programming Language :: Python :: 3.12 -Classifier: Programming Language :: Python :: 3.13 -Classifier: Programming Language :: Python :: Implementation :: CPython -Classifier: Programming Language :: Python :: Implementation :: PyPy -Classifier: Topic :: Internet :: Name Service (DNS) -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: Utilities -Requires-Dist: ruff >= 0.6.2 ; extra == "all" -Requires-Dist: mypy >= 1.11.2 ; extra == "all" -Requires-Dist: pytest >= 8.3.2 ; extra == "all" -Requires-Dist: flake8 >= 7.1.1 ; extra == "all" -Project-URL: Changelog, https://github.com/kjd/idna/blob/master/HISTORY.rst -Project-URL: Issue tracker, https://github.com/kjd/idna/issues -Project-URL: Source, https://github.com/kjd/idna -Provides-Extra: all - -Internationalized Domain Names in Applications (IDNA) -===================================================== - -Support for the Internationalized Domain Names in -Applications (IDNA) protocol as specified in `RFC 5891 -`_. This is the latest version of -the protocol and is sometimes referred to as “IDNA 2008”. - -This library also provides support for Unicode Technical -Standard 46, `Unicode IDNA Compatibility Processing -`_. - -This acts as a suitable replacement for the “encodings.idna” -module that comes with the Python standard library, but which -only supports the older superseded IDNA specification (`RFC 3490 -`_). - -Basic functions are simply executed: - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - - -Installation ------------- - -This package is available for installation from PyPI: - -.. code-block:: bash - - $ python3 -m pip install idna - - -Usage ------ - -For typical usage, the ``encode`` and ``decode`` functions will take a -domain name argument and perform a conversion to A-labels or U-labels -respectively. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('ドメイン.テスト') - b'xn--eckwd4c7c.xn--zckzah' - >>> print(idna.decode('xn--eckwd4c7c.xn--zckzah')) - ドメイン.テスト - -You may use the codec encoding and decoding methods using the -``idna.codec`` module: - -.. 
code-block:: pycon - - >>> import idna.codec - >>> print('домен.испытание'.encode('idna2008')) - b'xn--d1acufc.xn--80akhbyknj4f' - >>> print(b'xn--d1acufc.xn--80akhbyknj4f'.decode('idna2008')) - домен.испытание - -Conversions can be applied at a per-label basis using the ``ulabel`` or -``alabel`` functions if necessary: - -.. code-block:: pycon - - >>> idna.alabel('测试') - b'xn--0zwm56d' - -Compatibility Mapping (UTS #46) -+++++++++++++++++++++++++++++++ - -As described in `RFC 5895 `_, the -IDNA specification does not normalize input from different potential -ways a user may input a domain name. This functionality, known as -a “mapping”, is considered by the specification to be a local -user-interface issue distinct from IDNA conversion functionality. - -This library provides one such mapping that was developed by the -Unicode Consortium. Known as `Unicode IDNA Compatibility Processing -`_, it provides for both a regular -mapping for typical applications, as well as a transitional mapping to -help migrate from older IDNA 2003 applications. Strings are -preprocessed according to Section 4.4 “Preprocessing for IDNA2008” -prior to the IDNA operations. - -For example, “Königsgäßchen” is not a permissible label as *LATIN -CAPITAL LETTER K* is not allowed (nor are capital letters in general). -UTS 46 will convert this into lower case prior to applying the IDNA -conversion. - -.. code-block:: pycon - - >>> import idna - >>> idna.encode('Königsgäßchen') - ... - idna.core.InvalidCodepoint: Codepoint U+004B at position 1 of 'Königsgäßchen' not allowed - >>> idna.encode('Königsgäßchen', uts46=True) - b'xn--knigsgchen-b4a3dun' - >>> print(idna.decode('xn--knigsgchen-b4a3dun')) - königsgäßchen - -Transitional processing provides conversions to help transition from -the older 2003 standard to the current standard. For example, in the -original IDNA specification, the *LATIN SMALL LETTER SHARP S* (ß) was -converted into two *LATIN SMALL LETTER S* (ss), whereas in the current -IDNA specification this conversion is not performed. - -.. code-block:: pycon - - >>> idna.encode('Königsgäßchen', uts46=True, transitional=True) - 'xn--knigsgsschen-lcb0w' - -Implementers should use transitional processing with caution, only in -rare cases where conversion from legacy labels to current labels must be -performed (i.e. IDNA implementations that pre-date 2008). For typical -applications that just need to convert labels, transitional processing -is unlikely to be beneficial and could produce unexpected incompatible -results. - -``encodings.idna`` Compatibility -++++++++++++++++++++++++++++++++ - -Function calls from the Python built-in ``encodings.idna`` module are -mapped to their IDNA 2008 equivalents using the ``idna.compat`` module. -Simply substitute the ``import`` clause in your code to refer to the new -module name. - -Exceptions ----------- - -All errors raised during the conversion following the specification -should raise an exception derived from the ``idna.IDNAError`` base -class. - -More specific exceptions that may be generated as ``idna.IDNABidiError`` -when the error reflects an illegal combination of left-to-right and -right-to-left characters in a label; ``idna.InvalidCodepoint`` when -a specific codepoint is an illegal character in an IDN label (i.e. -INVALID); and ``idna.InvalidCodepointContext`` when the codepoint is -illegal based on its positional context (i.e. it is CONTEXTO or CONTEXTJ -but the contextual requirements are not satisfied.) 
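-
-For instance (an illustrative sketch, not part of the original METADATA),
-all of these can be caught via the common base class:
-
-.. code-block:: pycon
-
-    >>> import idna
-    >>> try:
-    ...     idna.encode('a..b')
-    ... except idna.IDNAError as exc:
-    ...     print(type(exc).__name__)
-    IDNAError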
- -Building and Diagnostics ------------------------- - -The IDNA and UTS 46 functionality relies upon pre-calculated lookup -tables for performance. These tables are derived from computing against -eligibility criteria in the respective standards. These tables are -computed using the command-line script ``tools/idna-data``. - -This tool will fetch relevant codepoint data from the Unicode repository -and perform the required calculations to identify eligibility. There are -three main modes: - -* ``idna-data make-libdata``. Generates ``idnadata.py`` and - ``uts46data.py``, the pre-calculated lookup tables used for IDNA and - UTS 46 conversions. Implementers who wish to track this library against - a different Unicode version may use this tool to manually generate a - different version of the ``idnadata.py`` and ``uts46data.py`` files. - -* ``idna-data make-table``. Generate a table of the IDNA disposition - (e.g. PVALID, CONTEXTJ, CONTEXTO) in the format found in Appendix - B.1 of RFC 5892 and the pre-computed tables published by `IANA - `_. - -* ``idna-data U+0061``. Prints debugging output on the various - properties associated with an individual Unicode codepoint (in this - case, U+0061), that are used to assess the IDNA and UTS 46 status of a - codepoint. This is helpful in debugging or analysis. - -The tool accepts a number of arguments, described using ``idna-data --h``. Most notably, the ``--version`` argument allows the specification -of the version of Unicode to be used in computing the table data. For -example, ``idna-data --version 9.0.0 make-libdata`` will generate -library data against Unicode 9.0.0. - - -Additional Notes ----------------- - -* **Packages**. The latest tagged release version is published in the - `Python Package Index `_. - -* **Version support**. This library supports Python 3.6 and higher. - As this library serves as a low-level toolkit for a variety of - applications, many of which strive for broad compatibility with older - Python versions, there is no rush to remove older interpreter support. - Removing support for older versions should be well justified in that the - maintenance burden has become too high. - -* **Python 2**. Python 2 is supported by version 2.x of this library. - Use "idna<3" in your requirements file if you need this library for - a Python 2 application. Be advised that these versions are no longer - actively developed. - -* **Testing**. The library has a test suite based on each rule of the - IDNA specification, as well as tests that are provided as part of the - Unicode Technical Standard 46, `Unicode IDNA Compatibility Processing - `_. - -* **Emoji**. It is an occasional request to support emoji domains in - this library. Encoding of symbols like emoji is expressly prohibited by - the technical standard IDNA 2008 and emoji domains are broadly phased - out across the domain industry due to associated security risks. For - now, applications that need to support these non-compliant labels - may wish to consider trying the encode/decode operation in this library - first, and then falling back to using `encodings.idna`. See `the Github - project `_ for more discussion. 
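(A quick round-trip sketch of the per-label helpers documented in the METADATA above; the alabel value is taken verbatim from that file, and the inverse ulabel call is an editor's illustration, assuming the same idna package:)

    >>> import idna
    >>> idna.alabel('测试')
    b'xn--0zwm56d'
    >>> idna.ulabel(b'xn--0zwm56d')
    '测试'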
- diff --git a/myenv/Lib/site-packages/idna-3.10.dist-info/RECORD b/myenv/Lib/site-packages/idna-3.10.dist-info/RECORD deleted file mode 100644 index 54f6609..0000000 --- a/myenv/Lib/site-packages/idna-3.10.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -idna-3.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -idna-3.10.dist-info/LICENSE.md,sha256=pZ8LDvNjWHQQmkRhykT_enDVBpboFHZ7-vch1Mmw2w8,1541 -idna-3.10.dist-info/METADATA,sha256=URR5ZyDfQ1PCEGhkYoojqfi2Ra0tau2--lhwG4XSfjI,10158 -idna-3.10.dist-info/RECORD,, -idna-3.10.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -idna/__init__.py,sha256=MPqNDLZbXqGaNdXxAFhiqFPKEQXju2jNQhCey6-5eJM,868 -idna/__pycache__/__init__.cpython-312.pyc,, -idna/__pycache__/codec.cpython-312.pyc,, -idna/__pycache__/compat.cpython-312.pyc,, -idna/__pycache__/core.cpython-312.pyc,, -idna/__pycache__/idnadata.cpython-312.pyc,, -idna/__pycache__/intranges.cpython-312.pyc,, -idna/__pycache__/package_data.cpython-312.pyc,, -idna/__pycache__/uts46data.cpython-312.pyc,, -idna/codec.py,sha256=PEew3ItwzjW4hymbasnty2N2OXvNcgHB-JjrBuxHPYY,3422 -idna/compat.py,sha256=RzLy6QQCdl9784aFhb2EX9EKGCJjg0P3PilGdeXXcx8,316 -idna/core.py,sha256=YJYyAMnwiQEPjVC4-Fqu_p4CJ6yKKuDGmppBNQNQpFs,13239 -idna/idnadata.py,sha256=W30GcIGvtOWYwAjZj4ZjuouUutC6ffgNuyjJy7fZ-lo,78306 -idna/intranges.py,sha256=amUtkdhYcQG8Zr-CoMM_kVRacxkivC1WgxN1b63KKdU,1898 -idna/package_data.py,sha256=q59S3OXsc5VI8j6vSD0sGBMyk6zZ4vWFREE88yCJYKs,21 -idna/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -idna/uts46data.py,sha256=rt90K9J40gUSwppDPCrhjgi5AA6pWM65dEGRSf6rIhM,239289 diff --git a/myenv/Lib/site-packages/idna-3.10.dist-info/WHEEL b/myenv/Lib/site-packages/idna-3.10.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/myenv/Lib/site-packages/idna-3.10.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/idna/__init__.py b/myenv/Lib/site-packages/idna/__init__.py deleted file mode 100644 index cfdc030..0000000 --- a/myenv/Lib/site-packages/idna/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -from .core import ( - IDNABidiError, - IDNAError, - InvalidCodepoint, - InvalidCodepointContext, - alabel, - check_bidi, - check_hyphen_ok, - check_initial_combiner, - check_label, - check_nfc, - decode, - encode, - ulabel, - uts46_remap, - valid_contextj, - valid_contexto, - valid_label_length, - valid_string_length, -) -from .intranges import intranges_contain -from .package_data import __version__ - -__all__ = [ - "__version__", - "IDNABidiError", - "IDNAError", - "InvalidCodepoint", - "InvalidCodepointContext", - "alabel", - "check_bidi", - "check_hyphen_ok", - "check_initial_combiner", - "check_label", - "check_nfc", - "decode", - "encode", - "intranges_contain", - "ulabel", - "uts46_remap", - "valid_contextj", - "valid_contexto", - "valid_label_length", - "valid_string_length", -] diff --git a/myenv/Lib/site-packages/idna/codec.py b/myenv/Lib/site-packages/idna/codec.py deleted file mode 100644 index 913abfd..0000000 --- a/myenv/Lib/site-packages/idna/codec.py +++ /dev/null @@ -1,122 +0,0 @@ -import codecs -import re -from typing import Any, Optional, Tuple - -from .core import IDNAError, alabel, decode, encode, ulabel - -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class Codec(codecs.Codec): - def encode(self, data: str, errors: str = "strict") -> Tuple[bytes, int]: - if errors != "strict": - raise 
IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - return encode(data), len(data) - - def decode(self, data: bytes, errors: str = "strict") -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return "", 0 - - return decode(data), len(data) - - -class IncrementalEncoder(codecs.BufferedIncrementalEncoder): - def _buffer_encode(self, data: str, errors: str, final: bool) -> Tuple[bytes, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return b"", 0 - - labels = _unicode_dots_re.split(data) - trailing_dot = b"" - if labels: - if not labels[-1]: - trailing_dot = b"." - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = b"." - - result = [] - size = 0 - for label in labels: - result.append(alabel(label)) - if size: - size += 1 - size += len(label) - - # Join with U+002E - result_bytes = b".".join(result) + trailing_dot - size += len(trailing_dot) - return result_bytes, size - - -class IncrementalDecoder(codecs.BufferedIncrementalDecoder): - def _buffer_decode(self, data: Any, errors: str, final: bool) -> Tuple[str, int]: - if errors != "strict": - raise IDNAError('Unsupported error handling "{}"'.format(errors)) - - if not data: - return ("", 0) - - if not isinstance(data, str): - data = str(data, "ascii") - - labels = _unicode_dots_re.split(data) - trailing_dot = "" - if labels: - if not labels[-1]: - trailing_dot = "." - del labels[-1] - elif not final: - # Keep potentially unfinished label until the next call - del labels[-1] - if labels: - trailing_dot = "." - - result = [] - size = 0 - for label in labels: - result.append(ulabel(label)) - if size: - size += 1 - size += len(label) - - result_str = ".".join(result) + trailing_dot - size += len(trailing_dot) - return (result_str, size) - - -class StreamWriter(Codec, codecs.StreamWriter): - pass - - -class StreamReader(Codec, codecs.StreamReader): - pass - - -def search_function(name: str) -> Optional[codecs.CodecInfo]: - if name != "idna2008": - return None - return codecs.CodecInfo( - name=name, - encode=Codec().encode, - decode=Codec().decode, - incrementalencoder=IncrementalEncoder, - incrementaldecoder=IncrementalDecoder, - streamwriter=StreamWriter, - streamreader=StreamReader, - ) - - -codecs.register(search_function) diff --git a/myenv/Lib/site-packages/idna/compat.py b/myenv/Lib/site-packages/idna/compat.py deleted file mode 100644 index 1df9f2a..0000000 --- a/myenv/Lib/site-packages/idna/compat.py +++ /dev/null @@ -1,15 +0,0 @@ -from typing import Any, Union - -from .core import decode, encode - - -def ToASCII(label: str) -> bytes: - return encode(label) - - -def ToUnicode(label: Union[bytes, bytearray]) -> str: - return decode(label) - - -def nameprep(s: Any) -> None: - raise NotImplementedError("IDNA 2008 does not utilise nameprep protocol") diff --git a/myenv/Lib/site-packages/idna/core.py b/myenv/Lib/site-packages/idna/core.py deleted file mode 100644 index 9115f12..0000000 --- a/myenv/Lib/site-packages/idna/core.py +++ /dev/null @@ -1,437 +0,0 @@ -import bisect -import re -import unicodedata -from typing import Optional, Union - -from . 
import idnadata -from .intranges import intranges_contain - -_virama_combining_class = 9 -_alabel_prefix = b"xn--" -_unicode_dots_re = re.compile("[\u002e\u3002\uff0e\uff61]") - - -class IDNAError(UnicodeError): - """Base exception for all IDNA-encoding related problems""" - - pass - - -class IDNABidiError(IDNAError): - """Exception when bidirectional requirements are not satisfied""" - - pass - - -class InvalidCodepoint(IDNAError): - """Exception when a disallowed or unallocated codepoint is used""" - - pass - - -class InvalidCodepointContext(IDNAError): - """Exception when the codepoint is not valid in the context it is used""" - - pass - - -def _combining_class(cp: int) -> int: - v = unicodedata.combining(chr(cp)) - if v == 0: - if not unicodedata.name(chr(cp)): - raise ValueError("Unknown character in unicodedata") - return v - - -def _is_script(cp: str, script: str) -> bool: - return intranges_contain(ord(cp), idnadata.scripts[script]) - - -def _punycode(s: str) -> bytes: - return s.encode("punycode") - - -def _unot(s: int) -> str: - return "U+{:04X}".format(s) - - -def valid_label_length(label: Union[bytes, str]) -> bool: - if len(label) > 63: - return False - return True - - -def valid_string_length(label: Union[bytes, str], trailing_dot: bool) -> bool: - if len(label) > (254 if trailing_dot else 253): - return False - return True - - -def check_bidi(label: str, check_ltr: bool = False) -> bool: - # Bidi rules should only be applied if string contains RTL characters - bidi_label = False - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - if direction == "": - # String likely comes from a newer version of Unicode - raise IDNABidiError("Unknown directionality in label {} at position {}".format(repr(label), idx)) - if direction in ["R", "AL", "AN"]: - bidi_label = True - if not bidi_label and not check_ltr: - return True - - # Bidi rule 1 - direction = unicodedata.bidirectional(label[0]) - if direction in ["R", "AL"]: - rtl = True - elif direction == "L": - rtl = False - else: - raise IDNABidiError("First codepoint in label {} must be directionality L, R or AL".format(repr(label))) - - valid_ending = False - number_type: Optional[str] = None - for idx, cp in enumerate(label, 1): - direction = unicodedata.bidirectional(cp) - - if rtl: - # Bidi rule 2 - if direction not in [ - "R", - "AL", - "AN", - "EN", - "ES", - "CS", - "ET", - "ON", - "BN", - "NSM", - ]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a right-to-left label".format(idx)) - # Bidi rule 3 - if direction in ["R", "AL", "EN", "AN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - # Bidi rule 4 - if direction in ["AN", "EN"]: - if not number_type: - number_type = direction - else: - if number_type != direction: - raise IDNABidiError("Can not mix numeral types in a right-to-left label") - else: - # Bidi rule 5 - if direction not in ["L", "EN", "ES", "CS", "ET", "ON", "BN", "NSM"]: - raise IDNABidiError("Invalid direction for codepoint at position {} in a left-to-right label".format(idx)) - # Bidi rule 6 - if direction in ["L", "EN"]: - valid_ending = True - elif direction != "NSM": - valid_ending = False - - if not valid_ending: - raise IDNABidiError("Label ends with illegal codepoint directionality") - - return True - - -def check_initial_combiner(label: str) -> bool: - if unicodedata.category(label[0])[0] == "M": - raise IDNAError("Label begins with an illegal combining character") - return True - - -def check_hyphen_ok(label: str) -> bool: 
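- # (Illustrative comment, not part of the upstream file.) The first check
- # rejects "--" in the 3rd/4th characters, e.g. "ab--cd"; RFC 5891 reserves
- # that position for the ACE prefix "xn--".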
- if label[2:4] == "--": - raise IDNAError("Label has disallowed hyphens in 3rd and 4th position") - if label[0] == "-" or label[-1] == "-": - raise IDNAError("Label must not start or end with a hyphen") - return True - - -def check_nfc(label: str) -> None: - if unicodedata.normalize("NFC", label) != label: - raise IDNAError("Label must be in Normalization Form C") - - -def valid_contextj(label: str, pos: int) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x200C: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - - ok = False - for i in range(pos - 1, -1, -1): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("L"), ord("D")]: - ok = True - break - else: - break - - if not ok: - return False - - ok = False - for i in range(pos + 1, len(label)): - joining_type = idnadata.joining_types.get(ord(label[i])) - if joining_type == ord("T"): - continue - elif joining_type in [ord("R"), ord("D")]: - ok = True - break - else: - break - return ok - - if cp_value == 0x200D: - if pos > 0: - if _combining_class(ord(label[pos - 1])) == _virama_combining_class: - return True - return False - - else: - return False - - -def valid_contexto(label: str, pos: int, exception: bool = False) -> bool: - cp_value = ord(label[pos]) - - if cp_value == 0x00B7: - if 0 < pos < len(label) - 1: - if ord(label[pos - 1]) == 0x006C and ord(label[pos + 1]) == 0x006C: - return True - return False - - elif cp_value == 0x0375: - if pos < len(label) - 1 and len(label) > 1: - return _is_script(label[pos + 1], "Greek") - return False - - elif cp_value == 0x05F3 or cp_value == 0x05F4: - if pos > 0: - return _is_script(label[pos - 1], "Hebrew") - return False - - elif cp_value == 0x30FB: - for cp in label: - if cp == "\u30fb": - continue - if _is_script(cp, "Hiragana") or _is_script(cp, "Katakana") or _is_script(cp, "Han"): - return True - return False - - elif 0x660 <= cp_value <= 0x669: - for cp in label: - if 0x6F0 <= ord(cp) <= 0x06F9: - return False - return True - - elif 0x6F0 <= cp_value <= 0x6F9: - for cp in label: - if 0x660 <= ord(cp) <= 0x0669: - return False - return True - - return False - - -def check_label(label: Union[str, bytes, bytearray]) -> None: - if isinstance(label, (bytes, bytearray)): - label = label.decode("utf-8") - if len(label) == 0: - raise IDNAError("Empty Label") - - check_nfc(label) - check_hyphen_ok(label) - check_initial_combiner(label) - - for pos, cp in enumerate(label): - cp_value = ord(cp) - if intranges_contain(cp_value, idnadata.codepoint_classes["PVALID"]): - continue - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTJ"]): - try: - if not valid_contextj(label, pos): - raise InvalidCodepointContext( - "Joiner {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - except ValueError: - raise IDNAError( - "Unknown codepoint adjacent to joiner {} at position {} in {}".format( - _unot(cp_value), pos + 1, repr(label) - ) - ) - elif intranges_contain(cp_value, idnadata.codepoint_classes["CONTEXTO"]): - if not valid_contexto(label, pos): - raise InvalidCodepointContext( - "Codepoint {} not allowed at position {} in {}".format(_unot(cp_value), pos + 1, repr(label)) - ) - else: - raise InvalidCodepoint( - "Codepoint {} at position {} of {} not allowed".format(_unot(cp_value), pos + 1, repr(label)) - ) - - check_bidi(label) - - -def alabel(label: str) -> bytes: - try: - label_bytes = label.encode("ascii") - 
ulabel(label_bytes) - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - return label_bytes - except UnicodeEncodeError: - pass - - check_label(label) - label_bytes = _alabel_prefix + _punycode(label) - - if not valid_label_length(label_bytes): - raise IDNAError("Label too long") - - return label_bytes - - -def ulabel(label: Union[str, bytes, bytearray]) -> str: - if not isinstance(label, (bytes, bytearray)): - try: - label_bytes = label.encode("ascii") - except UnicodeEncodeError: - check_label(label) - return label - else: - label_bytes = label - - label_bytes = label_bytes.lower() - if label_bytes.startswith(_alabel_prefix): - label_bytes = label_bytes[len(_alabel_prefix) :] - if not label_bytes: - raise IDNAError("Malformed A-label, no Punycode eligible content found") - if label_bytes.decode("ascii")[-1] == "-": - raise IDNAError("A-label must not end with a hyphen") - else: - check_label(label_bytes) - return label_bytes.decode("ascii") - - try: - label = label_bytes.decode("punycode") - except UnicodeError: - raise IDNAError("Invalid A-label") - check_label(label) - return label - - -def uts46_remap(domain: str, std3_rules: bool = True, transitional: bool = False) -> str: - """Re-map the characters in the string according to UTS46 processing.""" - from .uts46data import uts46data - - output = "" - - for pos, char in enumerate(domain): - code_point = ord(char) - try: - uts46row = uts46data[code_point if code_point < 256 else bisect.bisect_left(uts46data, (code_point, "Z")) - 1] - status = uts46row[1] - replacement: Optional[str] = None - if len(uts46row) == 3: - replacement = uts46row[2] - if ( - status == "V" - or (status == "D" and not transitional) - or (status == "3" and not std3_rules and replacement is None) - ): - output += char - elif replacement is not None and ( - status == "M" or (status == "3" and not std3_rules) or (status == "D" and transitional) - ): - output += replacement - elif status != "I": - raise IndexError() - except IndexError: - raise InvalidCodepoint( - "Codepoint {} not allowed at position {} in {}".format(_unot(code_point), pos + 1, repr(domain)) - ) - - return unicodedata.normalize("NFC", output) - - -def encode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, - transitional: bool = False, -) -> bytes: - if not isinstance(s, str): - try: - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("should pass a unicode string to the function rather than a byte string.") - if uts46: - s = uts46_remap(s, std3_rules, transitional) - trailing_dot = False - result = [] - if strict: - labels = s.split(".") - else: - labels = _unicode_dots_re.split(s) - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if labels[-1] == "": - del labels[-1] - trailing_dot = True - for label in labels: - s = alabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append(b"") - s = b".".join(result) - if not valid_string_length(s, trailing_dot): - raise IDNAError("Domain too long") - return s - - -def decode( - s: Union[str, bytes, bytearray], - strict: bool = False, - uts46: bool = False, - std3_rules: bool = False, -) -> str: - try: - if not isinstance(s, str): - s = str(s, "ascii") - except UnicodeDecodeError: - raise IDNAError("Invalid ASCII in A-label") - if uts46: - s = uts46_remap(s, std3_rules, False) - trailing_dot = False - result = [] - if not strict: - labels = _unicode_dots_re.split(s) - else: - 
labels = s.split(".") - if not labels or labels == [""]: - raise IDNAError("Empty domain") - if not labels[-1]: - del labels[-1] - trailing_dot = True - for label in labels: - s = ulabel(label) - if s: - result.append(s) - else: - raise IDNAError("Empty label") - if trailing_dot: - result.append("") - return ".".join(result) diff --git a/myenv/Lib/site-packages/idna/idnadata.py b/myenv/Lib/site-packages/idna/idnadata.py deleted file mode 100644 index 4be6004..0000000 --- a/myenv/Lib/site-packages/idna/idnadata.py +++ /dev/null @@ -1,4243 +0,0 @@ -# This file is automatically generated by tools/idna-data - -__version__ = "15.1.0" -scripts = { - "Greek": ( - 0x37000000374, - 0x37500000378, - 0x37A0000037E, - 0x37F00000380, - 0x38400000385, - 0x38600000387, - 0x3880000038B, - 0x38C0000038D, - 0x38E000003A2, - 0x3A3000003E2, - 0x3F000000400, - 0x1D2600001D2B, - 0x1D5D00001D62, - 0x1D6600001D6B, - 0x1DBF00001DC0, - 0x1F0000001F16, - 0x1F1800001F1E, - 0x1F2000001F46, - 0x1F4800001F4E, - 0x1F5000001F58, - 0x1F5900001F5A, - 0x1F5B00001F5C, - 0x1F5D00001F5E, - 0x1F5F00001F7E, - 0x1F8000001FB5, - 0x1FB600001FC5, - 0x1FC600001FD4, - 0x1FD600001FDC, - 0x1FDD00001FF0, - 0x1FF200001FF5, - 0x1FF600001FFF, - 0x212600002127, - 0xAB650000AB66, - 0x101400001018F, - 0x101A0000101A1, - 0x1D2000001D246, - ), - "Han": ( - 0x2E8000002E9A, - 0x2E9B00002EF4, - 0x2F0000002FD6, - 0x300500003006, - 0x300700003008, - 0x30210000302A, - 0x30380000303C, - 0x340000004DC0, - 0x4E000000A000, - 0xF9000000FA6E, - 0xFA700000FADA, - 0x16FE200016FE4, - 0x16FF000016FF2, - 0x200000002A6E0, - 0x2A7000002B73A, - 0x2B7400002B81E, - 0x2B8200002CEA2, - 0x2CEB00002EBE1, - 0x2EBF00002EE5E, - 0x2F8000002FA1E, - 0x300000003134B, - 0x31350000323B0, - ), - "Hebrew": ( - 0x591000005C8, - 0x5D0000005EB, - 0x5EF000005F5, - 0xFB1D0000FB37, - 0xFB380000FB3D, - 0xFB3E0000FB3F, - 0xFB400000FB42, - 0xFB430000FB45, - 0xFB460000FB50, - ), - "Hiragana": ( - 0x304100003097, - 0x309D000030A0, - 0x1B0010001B120, - 0x1B1320001B133, - 0x1B1500001B153, - 0x1F2000001F201, - ), - "Katakana": ( - 0x30A1000030FB, - 0x30FD00003100, - 0x31F000003200, - 0x32D0000032FF, - 0x330000003358, - 0xFF660000FF70, - 0xFF710000FF9E, - 0x1AFF00001AFF4, - 0x1AFF50001AFFC, - 0x1AFFD0001AFFF, - 0x1B0000001B001, - 0x1B1200001B123, - 0x1B1550001B156, - 0x1B1640001B168, - ), -} -joining_types = { - 0xAD: 84, - 0x300: 84, - 0x301: 84, - 0x302: 84, - 0x303: 84, - 0x304: 84, - 0x305: 84, - 0x306: 84, - 0x307: 84, - 0x308: 84, - 0x309: 84, - 0x30A: 84, - 0x30B: 84, - 0x30C: 84, - 0x30D: 84, - 0x30E: 84, - 0x30F: 84, - 0x310: 84, - 0x311: 84, - 0x312: 84, - 0x313: 84, - 0x314: 84, - 0x315: 84, - 0x316: 84, - 0x317: 84, - 0x318: 84, - 0x319: 84, - 0x31A: 84, - 0x31B: 84, - 0x31C: 84, - 0x31D: 84, - 0x31E: 84, - 0x31F: 84, - 0x320: 84, - 0x321: 84, - 0x322: 84, - 0x323: 84, - 0x324: 84, - 0x325: 84, - 0x326: 84, - 0x327: 84, - 0x328: 84, - 0x329: 84, - 0x32A: 84, - 0x32B: 84, - 0x32C: 84, - 0x32D: 84, - 0x32E: 84, - 0x32F: 84, - 0x330: 84, - 0x331: 84, - 0x332: 84, - 0x333: 84, - 0x334: 84, - 0x335: 84, - 0x336: 84, - 0x337: 84, - 0x338: 84, - 0x339: 84, - 0x33A: 84, - 0x33B: 84, - 0x33C: 84, - 0x33D: 84, - 0x33E: 84, - 0x33F: 84, - 0x340: 84, - 0x341: 84, - 0x342: 84, - 0x343: 84, - 0x344: 84, - 0x345: 84, - 0x346: 84, - 0x347: 84, - 0x348: 84, - 0x349: 84, - 0x34A: 84, - 0x34B: 84, - 0x34C: 84, - 0x34D: 84, - 0x34E: 84, - 0x34F: 84, - 0x350: 84, - 0x351: 84, - 0x352: 84, - 0x353: 84, - 0x354: 84, - 0x355: 84, - 0x356: 84, - 0x357: 84, - 0x358: 84, - 0x359: 84, - 0x35A: 84, - 0x35B: 84, - 
0x35C: 84, - 0x35D: 84, - 0x35E: 84, - 0x35F: 84, - 0x360: 84, - 0x361: 84, - 0x362: 84, - 0x363: 84, - 0x364: 84, - 0x365: 84, - 0x366: 84, - 0x367: 84, - 0x368: 84, - 0x369: 84, - 0x36A: 84, - 0x36B: 84, - 0x36C: 84, - 0x36D: 84, - 0x36E: 84, - 0x36F: 84, - 0x483: 84, - 0x484: 84, - 0x485: 84, - 0x486: 84, - 0x487: 84, - 0x488: 84, - 0x489: 84, - 0x591: 84, - 0x592: 84, - 0x593: 84, - 0x594: 84, - 0x595: 84, - 0x596: 84, - 0x597: 84, - 0x598: 84, - 0x599: 84, - 0x59A: 84, - 0x59B: 84, - 0x59C: 84, - 0x59D: 84, - 0x59E: 84, - 0x59F: 84, - 0x5A0: 84, - 0x5A1: 84, - 0x5A2: 84, - 0x5A3: 84, - 0x5A4: 84, - 0x5A5: 84, - 0x5A6: 84, - 0x5A7: 84, - 0x5A8: 84, - 0x5A9: 84, - 0x5AA: 84, - 0x5AB: 84, - 0x5AC: 84, - 0x5AD: 84, - 0x5AE: 84, - 0x5AF: 84, - 0x5B0: 84, - 0x5B1: 84, - 0x5B2: 84, - 0x5B3: 84, - 0x5B4: 84, - 0x5B5: 84, - 0x5B6: 84, - 0x5B7: 84, - 0x5B8: 84, - 0x5B9: 84, - 0x5BA: 84, - 0x5BB: 84, - 0x5BC: 84, - 0x5BD: 84, - 0x5BF: 84, - 0x5C1: 84, - 0x5C2: 84, - 0x5C4: 84, - 0x5C5: 84, - 0x5C7: 84, - 0x610: 84, - 0x611: 84, - 0x612: 84, - 0x613: 84, - 0x614: 84, - 0x615: 84, - 0x616: 84, - 0x617: 84, - 0x618: 84, - 0x619: 84, - 0x61A: 84, - 0x61C: 84, - 0x620: 68, - 0x622: 82, - 0x623: 82, - 0x624: 82, - 0x625: 82, - 0x626: 68, - 0x627: 82, - 0x628: 68, - 0x629: 82, - 0x62A: 68, - 0x62B: 68, - 0x62C: 68, - 0x62D: 68, - 0x62E: 68, - 0x62F: 82, - 0x630: 82, - 0x631: 82, - 0x632: 82, - 0x633: 68, - 0x634: 68, - 0x635: 68, - 0x636: 68, - 0x637: 68, - 0x638: 68, - 0x639: 68, - 0x63A: 68, - 0x63B: 68, - 0x63C: 68, - 0x63D: 68, - 0x63E: 68, - 0x63F: 68, - 0x640: 67, - 0x641: 68, - 0x642: 68, - 0x643: 68, - 0x644: 68, - 0x645: 68, - 0x646: 68, - 0x647: 68, - 0x648: 82, - 0x649: 68, - 0x64A: 68, - 0x64B: 84, - 0x64C: 84, - 0x64D: 84, - 0x64E: 84, - 0x64F: 84, - 0x650: 84, - 0x651: 84, - 0x652: 84, - 0x653: 84, - 0x654: 84, - 0x655: 84, - 0x656: 84, - 0x657: 84, - 0x658: 84, - 0x659: 84, - 0x65A: 84, - 0x65B: 84, - 0x65C: 84, - 0x65D: 84, - 0x65E: 84, - 0x65F: 84, - 0x66E: 68, - 0x66F: 68, - 0x670: 84, - 0x671: 82, - 0x672: 82, - 0x673: 82, - 0x675: 82, - 0x676: 82, - 0x677: 82, - 0x678: 68, - 0x679: 68, - 0x67A: 68, - 0x67B: 68, - 0x67C: 68, - 0x67D: 68, - 0x67E: 68, - 0x67F: 68, - 0x680: 68, - 0x681: 68, - 0x682: 68, - 0x683: 68, - 0x684: 68, - 0x685: 68, - 0x686: 68, - 0x687: 68, - 0x688: 82, - 0x689: 82, - 0x68A: 82, - 0x68B: 82, - 0x68C: 82, - 0x68D: 82, - 0x68E: 82, - 0x68F: 82, - 0x690: 82, - 0x691: 82, - 0x692: 82, - 0x693: 82, - 0x694: 82, - 0x695: 82, - 0x696: 82, - 0x697: 82, - 0x698: 82, - 0x699: 82, - 0x69A: 68, - 0x69B: 68, - 0x69C: 68, - 0x69D: 68, - 0x69E: 68, - 0x69F: 68, - 0x6A0: 68, - 0x6A1: 68, - 0x6A2: 68, - 0x6A3: 68, - 0x6A4: 68, - 0x6A5: 68, - 0x6A6: 68, - 0x6A7: 68, - 0x6A8: 68, - 0x6A9: 68, - 0x6AA: 68, - 0x6AB: 68, - 0x6AC: 68, - 0x6AD: 68, - 0x6AE: 68, - 0x6AF: 68, - 0x6B0: 68, - 0x6B1: 68, - 0x6B2: 68, - 0x6B3: 68, - 0x6B4: 68, - 0x6B5: 68, - 0x6B6: 68, - 0x6B7: 68, - 0x6B8: 68, - 0x6B9: 68, - 0x6BA: 68, - 0x6BB: 68, - 0x6BC: 68, - 0x6BD: 68, - 0x6BE: 68, - 0x6BF: 68, - 0x6C0: 82, - 0x6C1: 68, - 0x6C2: 68, - 0x6C3: 82, - 0x6C4: 82, - 0x6C5: 82, - 0x6C6: 82, - 0x6C7: 82, - 0x6C8: 82, - 0x6C9: 82, - 0x6CA: 82, - 0x6CB: 82, - 0x6CC: 68, - 0x6CD: 82, - 0x6CE: 68, - 0x6CF: 82, - 0x6D0: 68, - 0x6D1: 68, - 0x6D2: 82, - 0x6D3: 82, - 0x6D5: 82, - 0x6D6: 84, - 0x6D7: 84, - 0x6D8: 84, - 0x6D9: 84, - 0x6DA: 84, - 0x6DB: 84, - 0x6DC: 84, - 0x6DF: 84, - 0x6E0: 84, - 0x6E1: 84, - 0x6E2: 84, - 0x6E3: 84, - 0x6E4: 84, - 0x6E7: 84, - 0x6E8: 84, - 0x6EA: 84, - 0x6EB: 84, - 0x6EC: 84, - 
0x6ED: 84, - 0x6EE: 82, - 0x6EF: 82, - 0x6FA: 68, - 0x6FB: 68, - 0x6FC: 68, - 0x6FF: 68, - 0x70F: 84, - 0x710: 82, - 0x711: 84, - 0x712: 68, - 0x713: 68, - 0x714: 68, - 0x715: 82, - 0x716: 82, - 0x717: 82, - 0x718: 82, - 0x719: 82, - 0x71A: 68, - 0x71B: 68, - 0x71C: 68, - 0x71D: 68, - 0x71E: 82, - 0x71F: 68, - 0x720: 68, - 0x721: 68, - 0x722: 68, - 0x723: 68, - 0x724: 68, - 0x725: 68, - 0x726: 68, - 0x727: 68, - 0x728: 82, - 0x729: 68, - 0x72A: 82, - 0x72B: 68, - 0x72C: 82, - 0x72D: 68, - 0x72E: 68, - 0x72F: 82, - 0x730: 84, - 0x731: 84, - 0x732: 84, - 0x733: 84, - 0x734: 84, - 0x735: 84, - 0x736: 84, - 0x737: 84, - 0x738: 84, - 0x739: 84, - 0x73A: 84, - 0x73B: 84, - 0x73C: 84, - 0x73D: 84, - 0x73E: 84, - 0x73F: 84, - 0x740: 84, - 0x741: 84, - 0x742: 84, - 0x743: 84, - 0x744: 84, - 0x745: 84, - 0x746: 84, - 0x747: 84, - 0x748: 84, - 0x749: 84, - 0x74A: 84, - 0x74D: 82, - 0x74E: 68, - 0x74F: 68, - 0x750: 68, - 0x751: 68, - 0x752: 68, - 0x753: 68, - 0x754: 68, - 0x755: 68, - 0x756: 68, - 0x757: 68, - 0x758: 68, - 0x759: 82, - 0x75A: 82, - 0x75B: 82, - 0x75C: 68, - 0x75D: 68, - 0x75E: 68, - 0x75F: 68, - 0x760: 68, - 0x761: 68, - 0x762: 68, - 0x763: 68, - 0x764: 68, - 0x765: 68, - 0x766: 68, - 0x767: 68, - 0x768: 68, - 0x769: 68, - 0x76A: 68, - 0x76B: 82, - 0x76C: 82, - 0x76D: 68, - 0x76E: 68, - 0x76F: 68, - 0x770: 68, - 0x771: 82, - 0x772: 68, - 0x773: 82, - 0x774: 82, - 0x775: 68, - 0x776: 68, - 0x777: 68, - 0x778: 82, - 0x779: 82, - 0x77A: 68, - 0x77B: 68, - 0x77C: 68, - 0x77D: 68, - 0x77E: 68, - 0x77F: 68, - 0x7A6: 84, - 0x7A7: 84, - 0x7A8: 84, - 0x7A9: 84, - 0x7AA: 84, - 0x7AB: 84, - 0x7AC: 84, - 0x7AD: 84, - 0x7AE: 84, - 0x7AF: 84, - 0x7B0: 84, - 0x7CA: 68, - 0x7CB: 68, - 0x7CC: 68, - 0x7CD: 68, - 0x7CE: 68, - 0x7CF: 68, - 0x7D0: 68, - 0x7D1: 68, - 0x7D2: 68, - 0x7D3: 68, - 0x7D4: 68, - 0x7D5: 68, - 0x7D6: 68, - 0x7D7: 68, - 0x7D8: 68, - 0x7D9: 68, - 0x7DA: 68, - 0x7DB: 68, - 0x7DC: 68, - 0x7DD: 68, - 0x7DE: 68, - 0x7DF: 68, - 0x7E0: 68, - 0x7E1: 68, - 0x7E2: 68, - 0x7E3: 68, - 0x7E4: 68, - 0x7E5: 68, - 0x7E6: 68, - 0x7E7: 68, - 0x7E8: 68, - 0x7E9: 68, - 0x7EA: 68, - 0x7EB: 84, - 0x7EC: 84, - 0x7ED: 84, - 0x7EE: 84, - 0x7EF: 84, - 0x7F0: 84, - 0x7F1: 84, - 0x7F2: 84, - 0x7F3: 84, - 0x7FA: 67, - 0x7FD: 84, - 0x816: 84, - 0x817: 84, - 0x818: 84, - 0x819: 84, - 0x81B: 84, - 0x81C: 84, - 0x81D: 84, - 0x81E: 84, - 0x81F: 84, - 0x820: 84, - 0x821: 84, - 0x822: 84, - 0x823: 84, - 0x825: 84, - 0x826: 84, - 0x827: 84, - 0x829: 84, - 0x82A: 84, - 0x82B: 84, - 0x82C: 84, - 0x82D: 84, - 0x840: 82, - 0x841: 68, - 0x842: 68, - 0x843: 68, - 0x844: 68, - 0x845: 68, - 0x846: 82, - 0x847: 82, - 0x848: 68, - 0x849: 82, - 0x84A: 68, - 0x84B: 68, - 0x84C: 68, - 0x84D: 68, - 0x84E: 68, - 0x84F: 68, - 0x850: 68, - 0x851: 68, - 0x852: 68, - 0x853: 68, - 0x854: 82, - 0x855: 68, - 0x856: 82, - 0x857: 82, - 0x858: 82, - 0x859: 84, - 0x85A: 84, - 0x85B: 84, - 0x860: 68, - 0x862: 68, - 0x863: 68, - 0x864: 68, - 0x865: 68, - 0x867: 82, - 0x868: 68, - 0x869: 82, - 0x86A: 82, - 0x870: 82, - 0x871: 82, - 0x872: 82, - 0x873: 82, - 0x874: 82, - 0x875: 82, - 0x876: 82, - 0x877: 82, - 0x878: 82, - 0x879: 82, - 0x87A: 82, - 0x87B: 82, - 0x87C: 82, - 0x87D: 82, - 0x87E: 82, - 0x87F: 82, - 0x880: 82, - 0x881: 82, - 0x882: 82, - 0x883: 67, - 0x884: 67, - 0x885: 67, - 0x886: 68, - 0x889: 68, - 0x88A: 68, - 0x88B: 68, - 0x88C: 68, - 0x88D: 68, - 0x88E: 82, - 0x898: 84, - 0x899: 84, - 0x89A: 84, - 0x89B: 84, - 0x89C: 84, - 0x89D: 84, - 0x89E: 84, - 0x89F: 84, - 0x8A0: 68, - 0x8A1: 68, - 0x8A2: 68, - 0x8A3: 68, - 0x8A4: 68, - 
0x8A5: 68, - 0x8A6: 68, - 0x8A7: 68, - 0x8A8: 68, - 0x8A9: 68, - 0x8AA: 82, - 0x8AB: 82, - 0x8AC: 82, - 0x8AE: 82, - 0x8AF: 68, - 0x8B0: 68, - 0x8B1: 82, - 0x8B2: 82, - 0x8B3: 68, - 0x8B4: 68, - 0x8B5: 68, - 0x8B6: 68, - 0x8B7: 68, - 0x8B8: 68, - 0x8B9: 82, - 0x8BA: 68, - 0x8BB: 68, - 0x8BC: 68, - 0x8BD: 68, - 0x8BE: 68, - 0x8BF: 68, - 0x8C0: 68, - 0x8C1: 68, - 0x8C2: 68, - 0x8C3: 68, - 0x8C4: 68, - 0x8C5: 68, - 0x8C6: 68, - 0x8C7: 68, - 0x8C8: 68, - 0x8CA: 84, - 0x8CB: 84, - 0x8CC: 84, - 0x8CD: 84, - 0x8CE: 84, - 0x8CF: 84, - 0x8D0: 84, - 0x8D1: 84, - 0x8D2: 84, - 0x8D3: 84, - 0x8D4: 84, - 0x8D5: 84, - 0x8D6: 84, - 0x8D7: 84, - 0x8D8: 84, - 0x8D9: 84, - 0x8DA: 84, - 0x8DB: 84, - 0x8DC: 84, - 0x8DD: 84, - 0x8DE: 84, - 0x8DF: 84, - 0x8E0: 84, - 0x8E1: 84, - 0x8E3: 84, - 0x8E4: 84, - 0x8E5: 84, - 0x8E6: 84, - 0x8E7: 84, - 0x8E8: 84, - 0x8E9: 84, - 0x8EA: 84, - 0x8EB: 84, - 0x8EC: 84, - 0x8ED: 84, - 0x8EE: 84, - 0x8EF: 84, - 0x8F0: 84, - 0x8F1: 84, - 0x8F2: 84, - 0x8F3: 84, - 0x8F4: 84, - 0x8F5: 84, - 0x8F6: 84, - 0x8F7: 84, - 0x8F8: 84, - 0x8F9: 84, - 0x8FA: 84, - 0x8FB: 84, - 0x8FC: 84, - 0x8FD: 84, - 0x8FE: 84, - 0x8FF: 84, - 0x900: 84, - 0x901: 84, - 0x902: 84, - 0x93A: 84, - 0x93C: 84, - 0x941: 84, - 0x942: 84, - 0x943: 84, - 0x944: 84, - 0x945: 84, - 0x946: 84, - 0x947: 84, - 0x948: 84, - 0x94D: 84, - 0x951: 84, - 0x952: 84, - 0x953: 84, - 0x954: 84, - 0x955: 84, - 0x956: 84, - 0x957: 84, - 0x962: 84, - 0x963: 84, - 0x981: 84, - 0x9BC: 84, - 0x9C1: 84, - 0x9C2: 84, - 0x9C3: 84, - 0x9C4: 84, - 0x9CD: 84, - 0x9E2: 84, - 0x9E3: 84, - 0x9FE: 84, - 0xA01: 84, - 0xA02: 84, - 0xA3C: 84, - 0xA41: 84, - 0xA42: 84, - 0xA47: 84, - 0xA48: 84, - 0xA4B: 84, - 0xA4C: 84, - 0xA4D: 84, - 0xA51: 84, - 0xA70: 84, - 0xA71: 84, - 0xA75: 84, - 0xA81: 84, - 0xA82: 84, - 0xABC: 84, - 0xAC1: 84, - 0xAC2: 84, - 0xAC3: 84, - 0xAC4: 84, - 0xAC5: 84, - 0xAC7: 84, - 0xAC8: 84, - 0xACD: 84, - 0xAE2: 84, - 0xAE3: 84, - 0xAFA: 84, - 0xAFB: 84, - 0xAFC: 84, - 0xAFD: 84, - 0xAFE: 84, - 0xAFF: 84, - 0xB01: 84, - 0xB3C: 84, - 0xB3F: 84, - 0xB41: 84, - 0xB42: 84, - 0xB43: 84, - 0xB44: 84, - 0xB4D: 84, - 0xB55: 84, - 0xB56: 84, - 0xB62: 84, - 0xB63: 84, - 0xB82: 84, - 0xBC0: 84, - 0xBCD: 84, - 0xC00: 84, - 0xC04: 84, - 0xC3C: 84, - 0xC3E: 84, - 0xC3F: 84, - 0xC40: 84, - 0xC46: 84, - 0xC47: 84, - 0xC48: 84, - 0xC4A: 84, - 0xC4B: 84, - 0xC4C: 84, - 0xC4D: 84, - 0xC55: 84, - 0xC56: 84, - 0xC62: 84, - 0xC63: 84, - 0xC81: 84, - 0xCBC: 84, - 0xCBF: 84, - 0xCC6: 84, - 0xCCC: 84, - 0xCCD: 84, - 0xCE2: 84, - 0xCE3: 84, - 0xD00: 84, - 0xD01: 84, - 0xD3B: 84, - 0xD3C: 84, - 0xD41: 84, - 0xD42: 84, - 0xD43: 84, - 0xD44: 84, - 0xD4D: 84, - 0xD62: 84, - 0xD63: 84, - 0xD81: 84, - 0xDCA: 84, - 0xDD2: 84, - 0xDD3: 84, - 0xDD4: 84, - 0xDD6: 84, - 0xE31: 84, - 0xE34: 84, - 0xE35: 84, - 0xE36: 84, - 0xE37: 84, - 0xE38: 84, - 0xE39: 84, - 0xE3A: 84, - 0xE47: 84, - 0xE48: 84, - 0xE49: 84, - 0xE4A: 84, - 0xE4B: 84, - 0xE4C: 84, - 0xE4D: 84, - 0xE4E: 84, - 0xEB1: 84, - 0xEB4: 84, - 0xEB5: 84, - 0xEB6: 84, - 0xEB7: 84, - 0xEB8: 84, - 0xEB9: 84, - 0xEBA: 84, - 0xEBB: 84, - 0xEBC: 84, - 0xEC8: 84, - 0xEC9: 84, - 0xECA: 84, - 0xECB: 84, - 0xECC: 84, - 0xECD: 84, - 0xECE: 84, - 0xF18: 84, - 0xF19: 84, - 0xF35: 84, - 0xF37: 84, - 0xF39: 84, - 0xF71: 84, - 0xF72: 84, - 0xF73: 84, - 0xF74: 84, - 0xF75: 84, - 0xF76: 84, - 0xF77: 84, - 0xF78: 84, - 0xF79: 84, - 0xF7A: 84, - 0xF7B: 84, - 0xF7C: 84, - 0xF7D: 84, - 0xF7E: 84, - 0xF80: 84, - 0xF81: 84, - 0xF82: 84, - 0xF83: 84, - 0xF84: 84, - 0xF86: 84, - 0xF87: 84, - 0xF8D: 84, - 0xF8E: 84, - 0xF8F: 84, - 
0xF90: 84, - 0xF91: 84, - 0xF92: 84, - 0xF93: 84, - 0xF94: 84, - 0xF95: 84, - 0xF96: 84, - 0xF97: 84, - 0xF99: 84, - 0xF9A: 84, - 0xF9B: 84, - 0xF9C: 84, - 0xF9D: 84, - 0xF9E: 84, - 0xF9F: 84, - 0xFA0: 84, - 0xFA1: 84, - 0xFA2: 84, - 0xFA3: 84, - 0xFA4: 84, - 0xFA5: 84, - 0xFA6: 84, - 0xFA7: 84, - 0xFA8: 84, - 0xFA9: 84, - 0xFAA: 84, - 0xFAB: 84, - 0xFAC: 84, - 0xFAD: 84, - 0xFAE: 84, - 0xFAF: 84, - 0xFB0: 84, - 0xFB1: 84, - 0xFB2: 84, - 0xFB3: 84, - 0xFB4: 84, - 0xFB5: 84, - 0xFB6: 84, - 0xFB7: 84, - 0xFB8: 84, - 0xFB9: 84, - 0xFBA: 84, - 0xFBB: 84, - 0xFBC: 84, - 0xFC6: 84, - 0x102D: 84, - 0x102E: 84, - 0x102F: 84, - 0x1030: 84, - 0x1032: 84, - 0x1033: 84, - 0x1034: 84, - 0x1035: 84, - 0x1036: 84, - 0x1037: 84, - 0x1039: 84, - 0x103A: 84, - 0x103D: 84, - 0x103E: 84, - 0x1058: 84, - 0x1059: 84, - 0x105E: 84, - 0x105F: 84, - 0x1060: 84, - 0x1071: 84, - 0x1072: 84, - 0x1073: 84, - 0x1074: 84, - 0x1082: 84, - 0x1085: 84, - 0x1086: 84, - 0x108D: 84, - 0x109D: 84, - 0x135D: 84, - 0x135E: 84, - 0x135F: 84, - 0x1712: 84, - 0x1713: 84, - 0x1714: 84, - 0x1732: 84, - 0x1733: 84, - 0x1752: 84, - 0x1753: 84, - 0x1772: 84, - 0x1773: 84, - 0x17B4: 84, - 0x17B5: 84, - 0x17B7: 84, - 0x17B8: 84, - 0x17B9: 84, - 0x17BA: 84, - 0x17BB: 84, - 0x17BC: 84, - 0x17BD: 84, - 0x17C6: 84, - 0x17C9: 84, - 0x17CA: 84, - 0x17CB: 84, - 0x17CC: 84, - 0x17CD: 84, - 0x17CE: 84, - 0x17CF: 84, - 0x17D0: 84, - 0x17D1: 84, - 0x17D2: 84, - 0x17D3: 84, - 0x17DD: 84, - 0x1807: 68, - 0x180A: 67, - 0x180B: 84, - 0x180C: 84, - 0x180D: 84, - 0x180F: 84, - 0x1820: 68, - 0x1821: 68, - 0x1822: 68, - 0x1823: 68, - 0x1824: 68, - 0x1825: 68, - 0x1826: 68, - 0x1827: 68, - 0x1828: 68, - 0x1829: 68, - 0x182A: 68, - 0x182B: 68, - 0x182C: 68, - 0x182D: 68, - 0x182E: 68, - 0x182F: 68, - 0x1830: 68, - 0x1831: 68, - 0x1832: 68, - 0x1833: 68, - 0x1834: 68, - 0x1835: 68, - 0x1836: 68, - 0x1837: 68, - 0x1838: 68, - 0x1839: 68, - 0x183A: 68, - 0x183B: 68, - 0x183C: 68, - 0x183D: 68, - 0x183E: 68, - 0x183F: 68, - 0x1840: 68, - 0x1841: 68, - 0x1842: 68, - 0x1843: 68, - 0x1844: 68, - 0x1845: 68, - 0x1846: 68, - 0x1847: 68, - 0x1848: 68, - 0x1849: 68, - 0x184A: 68, - 0x184B: 68, - 0x184C: 68, - 0x184D: 68, - 0x184E: 68, - 0x184F: 68, - 0x1850: 68, - 0x1851: 68, - 0x1852: 68, - 0x1853: 68, - 0x1854: 68, - 0x1855: 68, - 0x1856: 68, - 0x1857: 68, - 0x1858: 68, - 0x1859: 68, - 0x185A: 68, - 0x185B: 68, - 0x185C: 68, - 0x185D: 68, - 0x185E: 68, - 0x185F: 68, - 0x1860: 68, - 0x1861: 68, - 0x1862: 68, - 0x1863: 68, - 0x1864: 68, - 0x1865: 68, - 0x1866: 68, - 0x1867: 68, - 0x1868: 68, - 0x1869: 68, - 0x186A: 68, - 0x186B: 68, - 0x186C: 68, - 0x186D: 68, - 0x186E: 68, - 0x186F: 68, - 0x1870: 68, - 0x1871: 68, - 0x1872: 68, - 0x1873: 68, - 0x1874: 68, - 0x1875: 68, - 0x1876: 68, - 0x1877: 68, - 0x1878: 68, - 0x1885: 84, - 0x1886: 84, - 0x1887: 68, - 0x1888: 68, - 0x1889: 68, - 0x188A: 68, - 0x188B: 68, - 0x188C: 68, - 0x188D: 68, - 0x188E: 68, - 0x188F: 68, - 0x1890: 68, - 0x1891: 68, - 0x1892: 68, - 0x1893: 68, - 0x1894: 68, - 0x1895: 68, - 0x1896: 68, - 0x1897: 68, - 0x1898: 68, - 0x1899: 68, - 0x189A: 68, - 0x189B: 68, - 0x189C: 68, - 0x189D: 68, - 0x189E: 68, - 0x189F: 68, - 0x18A0: 68, - 0x18A1: 68, - 0x18A2: 68, - 0x18A3: 68, - 0x18A4: 68, - 0x18A5: 68, - 0x18A6: 68, - 0x18A7: 68, - 0x18A8: 68, - 0x18A9: 84, - 0x18AA: 68, - 0x1920: 84, - 0x1921: 84, - 0x1922: 84, - 0x1927: 84, - 0x1928: 84, - 0x1932: 84, - 0x1939: 84, - 0x193A: 84, - 0x193B: 84, - 0x1A17: 84, - 0x1A18: 84, - 0x1A1B: 84, - 0x1A56: 84, - 0x1A58: 84, - 0x1A59: 84, - 0x1A5A: 84, - 0x1A5B: 84, - 
0x1A5C: 84, - 0x1A5D: 84, - 0x1A5E: 84, - 0x1A60: 84, - 0x1A62: 84, - 0x1A65: 84, - 0x1A66: 84, - 0x1A67: 84, - 0x1A68: 84, - 0x1A69: 84, - 0x1A6A: 84, - 0x1A6B: 84, - 0x1A6C: 84, - 0x1A73: 84, - 0x1A74: 84, - 0x1A75: 84, - 0x1A76: 84, - 0x1A77: 84, - 0x1A78: 84, - 0x1A79: 84, - 0x1A7A: 84, - 0x1A7B: 84, - 0x1A7C: 84, - 0x1A7F: 84, - 0x1AB0: 84, - 0x1AB1: 84, - 0x1AB2: 84, - 0x1AB3: 84, - 0x1AB4: 84, - 0x1AB5: 84, - 0x1AB6: 84, - 0x1AB7: 84, - 0x1AB8: 84, - 0x1AB9: 84, - 0x1ABA: 84, - 0x1ABB: 84, - 0x1ABC: 84, - 0x1ABD: 84, - 0x1ABE: 84, - 0x1ABF: 84, - 0x1AC0: 84, - 0x1AC1: 84, - 0x1AC2: 84, - 0x1AC3: 84, - 0x1AC4: 84, - 0x1AC5: 84, - 0x1AC6: 84, - 0x1AC7: 84, - 0x1AC8: 84, - 0x1AC9: 84, - 0x1ACA: 84, - 0x1ACB: 84, - 0x1ACC: 84, - 0x1ACD: 84, - 0x1ACE: 84, - 0x1B00: 84, - 0x1B01: 84, - 0x1B02: 84, - 0x1B03: 84, - 0x1B34: 84, - 0x1B36: 84, - 0x1B37: 84, - 0x1B38: 84, - 0x1B39: 84, - 0x1B3A: 84, - 0x1B3C: 84, - 0x1B42: 84, - 0x1B6B: 84, - 0x1B6C: 84, - 0x1B6D: 84, - 0x1B6E: 84, - 0x1B6F: 84, - 0x1B70: 84, - 0x1B71: 84, - 0x1B72: 84, - 0x1B73: 84, - 0x1B80: 84, - 0x1B81: 84, - 0x1BA2: 84, - 0x1BA3: 84, - 0x1BA4: 84, - 0x1BA5: 84, - 0x1BA8: 84, - 0x1BA9: 84, - 0x1BAB: 84, - 0x1BAC: 84, - 0x1BAD: 84, - 0x1BE6: 84, - 0x1BE8: 84, - 0x1BE9: 84, - 0x1BED: 84, - 0x1BEF: 84, - 0x1BF0: 84, - 0x1BF1: 84, - 0x1C2C: 84, - 0x1C2D: 84, - 0x1C2E: 84, - 0x1C2F: 84, - 0x1C30: 84, - 0x1C31: 84, - 0x1C32: 84, - 0x1C33: 84, - 0x1C36: 84, - 0x1C37: 84, - 0x1CD0: 84, - 0x1CD1: 84, - 0x1CD2: 84, - 0x1CD4: 84, - 0x1CD5: 84, - 0x1CD6: 84, - 0x1CD7: 84, - 0x1CD8: 84, - 0x1CD9: 84, - 0x1CDA: 84, - 0x1CDB: 84, - 0x1CDC: 84, - 0x1CDD: 84, - 0x1CDE: 84, - 0x1CDF: 84, - 0x1CE0: 84, - 0x1CE2: 84, - 0x1CE3: 84, - 0x1CE4: 84, - 0x1CE5: 84, - 0x1CE6: 84, - 0x1CE7: 84, - 0x1CE8: 84, - 0x1CED: 84, - 0x1CF4: 84, - 0x1CF8: 84, - 0x1CF9: 84, - 0x1DC0: 84, - 0x1DC1: 84, - 0x1DC2: 84, - 0x1DC3: 84, - 0x1DC4: 84, - 0x1DC5: 84, - 0x1DC6: 84, - 0x1DC7: 84, - 0x1DC8: 84, - 0x1DC9: 84, - 0x1DCA: 84, - 0x1DCB: 84, - 0x1DCC: 84, - 0x1DCD: 84, - 0x1DCE: 84, - 0x1DCF: 84, - 0x1DD0: 84, - 0x1DD1: 84, - 0x1DD2: 84, - 0x1DD3: 84, - 0x1DD4: 84, - 0x1DD5: 84, - 0x1DD6: 84, - 0x1DD7: 84, - 0x1DD8: 84, - 0x1DD9: 84, - 0x1DDA: 84, - 0x1DDB: 84, - 0x1DDC: 84, - 0x1DDD: 84, - 0x1DDE: 84, - 0x1DDF: 84, - 0x1DE0: 84, - 0x1DE1: 84, - 0x1DE2: 84, - 0x1DE3: 84, - 0x1DE4: 84, - 0x1DE5: 84, - 0x1DE6: 84, - 0x1DE7: 84, - 0x1DE8: 84, - 0x1DE9: 84, - 0x1DEA: 84, - 0x1DEB: 84, - 0x1DEC: 84, - 0x1DED: 84, - 0x1DEE: 84, - 0x1DEF: 84, - 0x1DF0: 84, - 0x1DF1: 84, - 0x1DF2: 84, - 0x1DF3: 84, - 0x1DF4: 84, - 0x1DF5: 84, - 0x1DF6: 84, - 0x1DF7: 84, - 0x1DF8: 84, - 0x1DF9: 84, - 0x1DFA: 84, - 0x1DFB: 84, - 0x1DFC: 84, - 0x1DFD: 84, - 0x1DFE: 84, - 0x1DFF: 84, - 0x200B: 84, - 0x200D: 67, - 0x200E: 84, - 0x200F: 84, - 0x202A: 84, - 0x202B: 84, - 0x202C: 84, - 0x202D: 84, - 0x202E: 84, - 0x2060: 84, - 0x2061: 84, - 0x2062: 84, - 0x2063: 84, - 0x2064: 84, - 0x206A: 84, - 0x206B: 84, - 0x206C: 84, - 0x206D: 84, - 0x206E: 84, - 0x206F: 84, - 0x20D0: 84, - 0x20D1: 84, - 0x20D2: 84, - 0x20D3: 84, - 0x20D4: 84, - 0x20D5: 84, - 0x20D6: 84, - 0x20D7: 84, - 0x20D8: 84, - 0x20D9: 84, - 0x20DA: 84, - 0x20DB: 84, - 0x20DC: 84, - 0x20DD: 84, - 0x20DE: 84, - 0x20DF: 84, - 0x20E0: 84, - 0x20E1: 84, - 0x20E2: 84, - 0x20E3: 84, - 0x20E4: 84, - 0x20E5: 84, - 0x20E6: 84, - 0x20E7: 84, - 0x20E8: 84, - 0x20E9: 84, - 0x20EA: 84, - 0x20EB: 84, - 0x20EC: 84, - 0x20ED: 84, - 0x20EE: 84, - 0x20EF: 84, - 0x20F0: 84, - 0x2CEF: 84, - 0x2CF0: 84, - 0x2CF1: 84, - 0x2D7F: 84, - 0x2DE0: 84, - 0x2DE1: 84, 
- 0x2DE2: 84, - 0x2DE3: 84, - 0x2DE4: 84, - 0x2DE5: 84, - 0x2DE6: 84, - 0x2DE7: 84, - 0x2DE8: 84, - 0x2DE9: 84, - 0x2DEA: 84, - 0x2DEB: 84, - 0x2DEC: 84, - 0x2DED: 84, - 0x2DEE: 84, - 0x2DEF: 84, - 0x2DF0: 84, - 0x2DF1: 84, - 0x2DF2: 84, - 0x2DF3: 84, - 0x2DF4: 84, - 0x2DF5: 84, - 0x2DF6: 84, - 0x2DF7: 84, - 0x2DF8: 84, - 0x2DF9: 84, - 0x2DFA: 84, - 0x2DFB: 84, - 0x2DFC: 84, - 0x2DFD: 84, - 0x2DFE: 84, - 0x2DFF: 84, - 0x302A: 84, - 0x302B: 84, - 0x302C: 84, - 0x302D: 84, - 0x3099: 84, - 0x309A: 84, - 0xA66F: 84, - 0xA670: 84, - 0xA671: 84, - 0xA672: 84, - 0xA674: 84, - 0xA675: 84, - 0xA676: 84, - 0xA677: 84, - 0xA678: 84, - 0xA679: 84, - 0xA67A: 84, - 0xA67B: 84, - 0xA67C: 84, - 0xA67D: 84, - 0xA69E: 84, - 0xA69F: 84, - 0xA6F0: 84, - 0xA6F1: 84, - 0xA802: 84, - 0xA806: 84, - 0xA80B: 84, - 0xA825: 84, - 0xA826: 84, - 0xA82C: 84, - 0xA840: 68, - 0xA841: 68, - 0xA842: 68, - 0xA843: 68, - 0xA844: 68, - 0xA845: 68, - 0xA846: 68, - 0xA847: 68, - 0xA848: 68, - 0xA849: 68, - 0xA84A: 68, - 0xA84B: 68, - 0xA84C: 68, - 0xA84D: 68, - 0xA84E: 68, - 0xA84F: 68, - 0xA850: 68, - 0xA851: 68, - 0xA852: 68, - 0xA853: 68, - 0xA854: 68, - 0xA855: 68, - 0xA856: 68, - 0xA857: 68, - 0xA858: 68, - 0xA859: 68, - 0xA85A: 68, - 0xA85B: 68, - 0xA85C: 68, - 0xA85D: 68, - 0xA85E: 68, - 0xA85F: 68, - 0xA860: 68, - 0xA861: 68, - 0xA862: 68, - 0xA863: 68, - 0xA864: 68, - 0xA865: 68, - 0xA866: 68, - 0xA867: 68, - 0xA868: 68, - 0xA869: 68, - 0xA86A: 68, - 0xA86B: 68, - 0xA86C: 68, - 0xA86D: 68, - 0xA86E: 68, - 0xA86F: 68, - 0xA870: 68, - 0xA871: 68, - 0xA872: 76, - 0xA8C4: 84, - 0xA8C5: 84, - 0xA8E0: 84, - 0xA8E1: 84, - 0xA8E2: 84, - 0xA8E3: 84, - 0xA8E4: 84, - 0xA8E5: 84, - 0xA8E6: 84, - 0xA8E7: 84, - 0xA8E8: 84, - 0xA8E9: 84, - 0xA8EA: 84, - 0xA8EB: 84, - 0xA8EC: 84, - 0xA8ED: 84, - 0xA8EE: 84, - 0xA8EF: 84, - 0xA8F0: 84, - 0xA8F1: 84, - 0xA8FF: 84, - 0xA926: 84, - 0xA927: 84, - 0xA928: 84, - 0xA929: 84, - 0xA92A: 84, - 0xA92B: 84, - 0xA92C: 84, - 0xA92D: 84, - 0xA947: 84, - 0xA948: 84, - 0xA949: 84, - 0xA94A: 84, - 0xA94B: 84, - 0xA94C: 84, - 0xA94D: 84, - 0xA94E: 84, - 0xA94F: 84, - 0xA950: 84, - 0xA951: 84, - 0xA980: 84, - 0xA981: 84, - 0xA982: 84, - 0xA9B3: 84, - 0xA9B6: 84, - 0xA9B7: 84, - 0xA9B8: 84, - 0xA9B9: 84, - 0xA9BC: 84, - 0xA9BD: 84, - 0xA9E5: 84, - 0xAA29: 84, - 0xAA2A: 84, - 0xAA2B: 84, - 0xAA2C: 84, - 0xAA2D: 84, - 0xAA2E: 84, - 0xAA31: 84, - 0xAA32: 84, - 0xAA35: 84, - 0xAA36: 84, - 0xAA43: 84, - 0xAA4C: 84, - 0xAA7C: 84, - 0xAAB0: 84, - 0xAAB2: 84, - 0xAAB3: 84, - 0xAAB4: 84, - 0xAAB7: 84, - 0xAAB8: 84, - 0xAABE: 84, - 0xAABF: 84, - 0xAAC1: 84, - 0xAAEC: 84, - 0xAAED: 84, - 0xAAF6: 84, - 0xABE5: 84, - 0xABE8: 84, - 0xABED: 84, - 0xFB1E: 84, - 0xFE00: 84, - 0xFE01: 84, - 0xFE02: 84, - 0xFE03: 84, - 0xFE04: 84, - 0xFE05: 84, - 0xFE06: 84, - 0xFE07: 84, - 0xFE08: 84, - 0xFE09: 84, - 0xFE0A: 84, - 0xFE0B: 84, - 0xFE0C: 84, - 0xFE0D: 84, - 0xFE0E: 84, - 0xFE0F: 84, - 0xFE20: 84, - 0xFE21: 84, - 0xFE22: 84, - 0xFE23: 84, - 0xFE24: 84, - 0xFE25: 84, - 0xFE26: 84, - 0xFE27: 84, - 0xFE28: 84, - 0xFE29: 84, - 0xFE2A: 84, - 0xFE2B: 84, - 0xFE2C: 84, - 0xFE2D: 84, - 0xFE2E: 84, - 0xFE2F: 84, - 0xFEFF: 84, - 0xFFF9: 84, - 0xFFFA: 84, - 0xFFFB: 84, - 0x101FD: 84, - 0x102E0: 84, - 0x10376: 84, - 0x10377: 84, - 0x10378: 84, - 0x10379: 84, - 0x1037A: 84, - 0x10A01: 84, - 0x10A02: 84, - 0x10A03: 84, - 0x10A05: 84, - 0x10A06: 84, - 0x10A0C: 84, - 0x10A0D: 84, - 0x10A0E: 84, - 0x10A0F: 84, - 0x10A38: 84, - 0x10A39: 84, - 0x10A3A: 84, - 0x10A3F: 84, - 0x10AC0: 68, - 0x10AC1: 68, - 0x10AC2: 68, - 0x10AC3: 68, - 0x10AC4: 68, - 
0x10AC5: 82, - 0x10AC7: 82, - 0x10AC9: 82, - 0x10ACA: 82, - 0x10ACD: 76, - 0x10ACE: 82, - 0x10ACF: 82, - 0x10AD0: 82, - 0x10AD1: 82, - 0x10AD2: 82, - 0x10AD3: 68, - 0x10AD4: 68, - 0x10AD5: 68, - 0x10AD6: 68, - 0x10AD7: 76, - 0x10AD8: 68, - 0x10AD9: 68, - 0x10ADA: 68, - 0x10ADB: 68, - 0x10ADC: 68, - 0x10ADD: 82, - 0x10ADE: 68, - 0x10ADF: 68, - 0x10AE0: 68, - 0x10AE1: 82, - 0x10AE4: 82, - 0x10AE5: 84, - 0x10AE6: 84, - 0x10AEB: 68, - 0x10AEC: 68, - 0x10AED: 68, - 0x10AEE: 68, - 0x10AEF: 82, - 0x10B80: 68, - 0x10B81: 82, - 0x10B82: 68, - 0x10B83: 82, - 0x10B84: 82, - 0x10B85: 82, - 0x10B86: 68, - 0x10B87: 68, - 0x10B88: 68, - 0x10B89: 82, - 0x10B8A: 68, - 0x10B8B: 68, - 0x10B8C: 82, - 0x10B8D: 68, - 0x10B8E: 82, - 0x10B8F: 82, - 0x10B90: 68, - 0x10B91: 82, - 0x10BA9: 82, - 0x10BAA: 82, - 0x10BAB: 82, - 0x10BAC: 82, - 0x10BAD: 68, - 0x10BAE: 68, - 0x10D00: 76, - 0x10D01: 68, - 0x10D02: 68, - 0x10D03: 68, - 0x10D04: 68, - 0x10D05: 68, - 0x10D06: 68, - 0x10D07: 68, - 0x10D08: 68, - 0x10D09: 68, - 0x10D0A: 68, - 0x10D0B: 68, - 0x10D0C: 68, - 0x10D0D: 68, - 0x10D0E: 68, - 0x10D0F: 68, - 0x10D10: 68, - 0x10D11: 68, - 0x10D12: 68, - 0x10D13: 68, - 0x10D14: 68, - 0x10D15: 68, - 0x10D16: 68, - 0x10D17: 68, - 0x10D18: 68, - 0x10D19: 68, - 0x10D1A: 68, - 0x10D1B: 68, - 0x10D1C: 68, - 0x10D1D: 68, - 0x10D1E: 68, - 0x10D1F: 68, - 0x10D20: 68, - 0x10D21: 68, - 0x10D22: 82, - 0x10D23: 68, - 0x10D24: 84, - 0x10D25: 84, - 0x10D26: 84, - 0x10D27: 84, - 0x10EAB: 84, - 0x10EAC: 84, - 0x10EFD: 84, - 0x10EFE: 84, - 0x10EFF: 84, - 0x10F30: 68, - 0x10F31: 68, - 0x10F32: 68, - 0x10F33: 82, - 0x10F34: 68, - 0x10F35: 68, - 0x10F36: 68, - 0x10F37: 68, - 0x10F38: 68, - 0x10F39: 68, - 0x10F3A: 68, - 0x10F3B: 68, - 0x10F3C: 68, - 0x10F3D: 68, - 0x10F3E: 68, - 0x10F3F: 68, - 0x10F40: 68, - 0x10F41: 68, - 0x10F42: 68, - 0x10F43: 68, - 0x10F44: 68, - 0x10F46: 84, - 0x10F47: 84, - 0x10F48: 84, - 0x10F49: 84, - 0x10F4A: 84, - 0x10F4B: 84, - 0x10F4C: 84, - 0x10F4D: 84, - 0x10F4E: 84, - 0x10F4F: 84, - 0x10F50: 84, - 0x10F51: 68, - 0x10F52: 68, - 0x10F53: 68, - 0x10F54: 82, - 0x10F70: 68, - 0x10F71: 68, - 0x10F72: 68, - 0x10F73: 68, - 0x10F74: 82, - 0x10F75: 82, - 0x10F76: 68, - 0x10F77: 68, - 0x10F78: 68, - 0x10F79: 68, - 0x10F7A: 68, - 0x10F7B: 68, - 0x10F7C: 68, - 0x10F7D: 68, - 0x10F7E: 68, - 0x10F7F: 68, - 0x10F80: 68, - 0x10F81: 68, - 0x10F82: 84, - 0x10F83: 84, - 0x10F84: 84, - 0x10F85: 84, - 0x10FB0: 68, - 0x10FB2: 68, - 0x10FB3: 68, - 0x10FB4: 82, - 0x10FB5: 82, - 0x10FB6: 82, - 0x10FB8: 68, - 0x10FB9: 82, - 0x10FBA: 82, - 0x10FBB: 68, - 0x10FBC: 68, - 0x10FBD: 82, - 0x10FBE: 68, - 0x10FBF: 68, - 0x10FC1: 68, - 0x10FC2: 82, - 0x10FC3: 82, - 0x10FC4: 68, - 0x10FC9: 82, - 0x10FCA: 68, - 0x10FCB: 76, - 0x11001: 84, - 0x11038: 84, - 0x11039: 84, - 0x1103A: 84, - 0x1103B: 84, - 0x1103C: 84, - 0x1103D: 84, - 0x1103E: 84, - 0x1103F: 84, - 0x11040: 84, - 0x11041: 84, - 0x11042: 84, - 0x11043: 84, - 0x11044: 84, - 0x11045: 84, - 0x11046: 84, - 0x11070: 84, - 0x11073: 84, - 0x11074: 84, - 0x1107F: 84, - 0x11080: 84, - 0x11081: 84, - 0x110B3: 84, - 0x110B4: 84, - 0x110B5: 84, - 0x110B6: 84, - 0x110B9: 84, - 0x110BA: 84, - 0x110C2: 84, - 0x11100: 84, - 0x11101: 84, - 0x11102: 84, - 0x11127: 84, - 0x11128: 84, - 0x11129: 84, - 0x1112A: 84, - 0x1112B: 84, - 0x1112D: 84, - 0x1112E: 84, - 0x1112F: 84, - 0x11130: 84, - 0x11131: 84, - 0x11132: 84, - 0x11133: 84, - 0x11134: 84, - 0x11173: 84, - 0x11180: 84, - 0x11181: 84, - 0x111B6: 84, - 0x111B7: 84, - 0x111B8: 84, - 0x111B9: 84, - 0x111BA: 84, - 0x111BB: 84, - 0x111BC: 84, - 0x111BD: 84, - 
0x111BE: 84, - 0x111C9: 84, - 0x111CA: 84, - 0x111CB: 84, - 0x111CC: 84, - 0x111CF: 84, - 0x1122F: 84, - 0x11230: 84, - 0x11231: 84, - 0x11234: 84, - 0x11236: 84, - 0x11237: 84, - 0x1123E: 84, - 0x11241: 84, - 0x112DF: 84, - 0x112E3: 84, - 0x112E4: 84, - 0x112E5: 84, - 0x112E6: 84, - 0x112E7: 84, - 0x112E8: 84, - 0x112E9: 84, - 0x112EA: 84, - 0x11300: 84, - 0x11301: 84, - 0x1133B: 84, - 0x1133C: 84, - 0x11340: 84, - 0x11366: 84, - 0x11367: 84, - 0x11368: 84, - 0x11369: 84, - 0x1136A: 84, - 0x1136B: 84, - 0x1136C: 84, - 0x11370: 84, - 0x11371: 84, - 0x11372: 84, - 0x11373: 84, - 0x11374: 84, - 0x11438: 84, - 0x11439: 84, - 0x1143A: 84, - 0x1143B: 84, - 0x1143C: 84, - 0x1143D: 84, - 0x1143E: 84, - 0x1143F: 84, - 0x11442: 84, - 0x11443: 84, - 0x11444: 84, - 0x11446: 84, - 0x1145E: 84, - 0x114B3: 84, - 0x114B4: 84, - 0x114B5: 84, - 0x114B6: 84, - 0x114B7: 84, - 0x114B8: 84, - 0x114BA: 84, - 0x114BF: 84, - 0x114C0: 84, - 0x114C2: 84, - 0x114C3: 84, - 0x115B2: 84, - 0x115B3: 84, - 0x115B4: 84, - 0x115B5: 84, - 0x115BC: 84, - 0x115BD: 84, - 0x115BF: 84, - 0x115C0: 84, - 0x115DC: 84, - 0x115DD: 84, - 0x11633: 84, - 0x11634: 84, - 0x11635: 84, - 0x11636: 84, - 0x11637: 84, - 0x11638: 84, - 0x11639: 84, - 0x1163A: 84, - 0x1163D: 84, - 0x1163F: 84, - 0x11640: 84, - 0x116AB: 84, - 0x116AD: 84, - 0x116B0: 84, - 0x116B1: 84, - 0x116B2: 84, - 0x116B3: 84, - 0x116B4: 84, - 0x116B5: 84, - 0x116B7: 84, - 0x1171D: 84, - 0x1171E: 84, - 0x1171F: 84, - 0x11722: 84, - 0x11723: 84, - 0x11724: 84, - 0x11725: 84, - 0x11727: 84, - 0x11728: 84, - 0x11729: 84, - 0x1172A: 84, - 0x1172B: 84, - 0x1182F: 84, - 0x11830: 84, - 0x11831: 84, - 0x11832: 84, - 0x11833: 84, - 0x11834: 84, - 0x11835: 84, - 0x11836: 84, - 0x11837: 84, - 0x11839: 84, - 0x1183A: 84, - 0x1193B: 84, - 0x1193C: 84, - 0x1193E: 84, - 0x11943: 84, - 0x119D4: 84, - 0x119D5: 84, - 0x119D6: 84, - 0x119D7: 84, - 0x119DA: 84, - 0x119DB: 84, - 0x119E0: 84, - 0x11A01: 84, - 0x11A02: 84, - 0x11A03: 84, - 0x11A04: 84, - 0x11A05: 84, - 0x11A06: 84, - 0x11A07: 84, - 0x11A08: 84, - 0x11A09: 84, - 0x11A0A: 84, - 0x11A33: 84, - 0x11A34: 84, - 0x11A35: 84, - 0x11A36: 84, - 0x11A37: 84, - 0x11A38: 84, - 0x11A3B: 84, - 0x11A3C: 84, - 0x11A3D: 84, - 0x11A3E: 84, - 0x11A47: 84, - 0x11A51: 84, - 0x11A52: 84, - 0x11A53: 84, - 0x11A54: 84, - 0x11A55: 84, - 0x11A56: 84, - 0x11A59: 84, - 0x11A5A: 84, - 0x11A5B: 84, - 0x11A8A: 84, - 0x11A8B: 84, - 0x11A8C: 84, - 0x11A8D: 84, - 0x11A8E: 84, - 0x11A8F: 84, - 0x11A90: 84, - 0x11A91: 84, - 0x11A92: 84, - 0x11A93: 84, - 0x11A94: 84, - 0x11A95: 84, - 0x11A96: 84, - 0x11A98: 84, - 0x11A99: 84, - 0x11C30: 84, - 0x11C31: 84, - 0x11C32: 84, - 0x11C33: 84, - 0x11C34: 84, - 0x11C35: 84, - 0x11C36: 84, - 0x11C38: 84, - 0x11C39: 84, - 0x11C3A: 84, - 0x11C3B: 84, - 0x11C3C: 84, - 0x11C3D: 84, - 0x11C3F: 84, - 0x11C92: 84, - 0x11C93: 84, - 0x11C94: 84, - 0x11C95: 84, - 0x11C96: 84, - 0x11C97: 84, - 0x11C98: 84, - 0x11C99: 84, - 0x11C9A: 84, - 0x11C9B: 84, - 0x11C9C: 84, - 0x11C9D: 84, - 0x11C9E: 84, - 0x11C9F: 84, - 0x11CA0: 84, - 0x11CA1: 84, - 0x11CA2: 84, - 0x11CA3: 84, - 0x11CA4: 84, - 0x11CA5: 84, - 0x11CA6: 84, - 0x11CA7: 84, - 0x11CAA: 84, - 0x11CAB: 84, - 0x11CAC: 84, - 0x11CAD: 84, - 0x11CAE: 84, - 0x11CAF: 84, - 0x11CB0: 84, - 0x11CB2: 84, - 0x11CB3: 84, - 0x11CB5: 84, - 0x11CB6: 84, - 0x11D31: 84, - 0x11D32: 84, - 0x11D33: 84, - 0x11D34: 84, - 0x11D35: 84, - 0x11D36: 84, - 0x11D3A: 84, - 0x11D3C: 84, - 0x11D3D: 84, - 0x11D3F: 84, - 0x11D40: 84, - 0x11D41: 84, - 0x11D42: 84, - 0x11D43: 84, - 0x11D44: 84, - 0x11D45: 84, - 0x11D47: 84, - 
0x11D90: 84, - 0x11D91: 84, - 0x11D95: 84, - 0x11D97: 84, - 0x11EF3: 84, - 0x11EF4: 84, - 0x11F00: 84, - 0x11F01: 84, - 0x11F36: 84, - 0x11F37: 84, - 0x11F38: 84, - 0x11F39: 84, - 0x11F3A: 84, - 0x11F40: 84, - 0x11F42: 84, - 0x13430: 84, - 0x13431: 84, - 0x13432: 84, - 0x13433: 84, - 0x13434: 84, - 0x13435: 84, - 0x13436: 84, - 0x13437: 84, - 0x13438: 84, - 0x13439: 84, - 0x1343A: 84, - 0x1343B: 84, - 0x1343C: 84, - 0x1343D: 84, - 0x1343E: 84, - 0x1343F: 84, - 0x13440: 84, - 0x13447: 84, - 0x13448: 84, - 0x13449: 84, - 0x1344A: 84, - 0x1344B: 84, - 0x1344C: 84, - 0x1344D: 84, - 0x1344E: 84, - 0x1344F: 84, - 0x13450: 84, - 0x13451: 84, - 0x13452: 84, - 0x13453: 84, - 0x13454: 84, - 0x13455: 84, - 0x16AF0: 84, - 0x16AF1: 84, - 0x16AF2: 84, - 0x16AF3: 84, - 0x16AF4: 84, - 0x16B30: 84, - 0x16B31: 84, - 0x16B32: 84, - 0x16B33: 84, - 0x16B34: 84, - 0x16B35: 84, - 0x16B36: 84, - 0x16F4F: 84, - 0x16F8F: 84, - 0x16F90: 84, - 0x16F91: 84, - 0x16F92: 84, - 0x16FE4: 84, - 0x1BC9D: 84, - 0x1BC9E: 84, - 0x1BCA0: 84, - 0x1BCA1: 84, - 0x1BCA2: 84, - 0x1BCA3: 84, - 0x1CF00: 84, - 0x1CF01: 84, - 0x1CF02: 84, - 0x1CF03: 84, - 0x1CF04: 84, - 0x1CF05: 84, - 0x1CF06: 84, - 0x1CF07: 84, - 0x1CF08: 84, - 0x1CF09: 84, - 0x1CF0A: 84, - 0x1CF0B: 84, - 0x1CF0C: 84, - 0x1CF0D: 84, - 0x1CF0E: 84, - 0x1CF0F: 84, - 0x1CF10: 84, - 0x1CF11: 84, - 0x1CF12: 84, - 0x1CF13: 84, - 0x1CF14: 84, - 0x1CF15: 84, - 0x1CF16: 84, - 0x1CF17: 84, - 0x1CF18: 84, - 0x1CF19: 84, - 0x1CF1A: 84, - 0x1CF1B: 84, - 0x1CF1C: 84, - 0x1CF1D: 84, - 0x1CF1E: 84, - 0x1CF1F: 84, - 0x1CF20: 84, - 0x1CF21: 84, - 0x1CF22: 84, - 0x1CF23: 84, - 0x1CF24: 84, - 0x1CF25: 84, - 0x1CF26: 84, - 0x1CF27: 84, - 0x1CF28: 84, - 0x1CF29: 84, - 0x1CF2A: 84, - 0x1CF2B: 84, - 0x1CF2C: 84, - 0x1CF2D: 84, - 0x1CF30: 84, - 0x1CF31: 84, - 0x1CF32: 84, - 0x1CF33: 84, - 0x1CF34: 84, - 0x1CF35: 84, - 0x1CF36: 84, - 0x1CF37: 84, - 0x1CF38: 84, - 0x1CF39: 84, - 0x1CF3A: 84, - 0x1CF3B: 84, - 0x1CF3C: 84, - 0x1CF3D: 84, - 0x1CF3E: 84, - 0x1CF3F: 84, - 0x1CF40: 84, - 0x1CF41: 84, - 0x1CF42: 84, - 0x1CF43: 84, - 0x1CF44: 84, - 0x1CF45: 84, - 0x1CF46: 84, - 0x1D167: 84, - 0x1D168: 84, - 0x1D169: 84, - 0x1D173: 84, - 0x1D174: 84, - 0x1D175: 84, - 0x1D176: 84, - 0x1D177: 84, - 0x1D178: 84, - 0x1D179: 84, - 0x1D17A: 84, - 0x1D17B: 84, - 0x1D17C: 84, - 0x1D17D: 84, - 0x1D17E: 84, - 0x1D17F: 84, - 0x1D180: 84, - 0x1D181: 84, - 0x1D182: 84, - 0x1D185: 84, - 0x1D186: 84, - 0x1D187: 84, - 0x1D188: 84, - 0x1D189: 84, - 0x1D18A: 84, - 0x1D18B: 84, - 0x1D1AA: 84, - 0x1D1AB: 84, - 0x1D1AC: 84, - 0x1D1AD: 84, - 0x1D242: 84, - 0x1D243: 84, - 0x1D244: 84, - 0x1DA00: 84, - 0x1DA01: 84, - 0x1DA02: 84, - 0x1DA03: 84, - 0x1DA04: 84, - 0x1DA05: 84, - 0x1DA06: 84, - 0x1DA07: 84, - 0x1DA08: 84, - 0x1DA09: 84, - 0x1DA0A: 84, - 0x1DA0B: 84, - 0x1DA0C: 84, - 0x1DA0D: 84, - 0x1DA0E: 84, - 0x1DA0F: 84, - 0x1DA10: 84, - 0x1DA11: 84, - 0x1DA12: 84, - 0x1DA13: 84, - 0x1DA14: 84, - 0x1DA15: 84, - 0x1DA16: 84, - 0x1DA17: 84, - 0x1DA18: 84, - 0x1DA19: 84, - 0x1DA1A: 84, - 0x1DA1B: 84, - 0x1DA1C: 84, - 0x1DA1D: 84, - 0x1DA1E: 84, - 0x1DA1F: 84, - 0x1DA20: 84, - 0x1DA21: 84, - 0x1DA22: 84, - 0x1DA23: 84, - 0x1DA24: 84, - 0x1DA25: 84, - 0x1DA26: 84, - 0x1DA27: 84, - 0x1DA28: 84, - 0x1DA29: 84, - 0x1DA2A: 84, - 0x1DA2B: 84, - 0x1DA2C: 84, - 0x1DA2D: 84, - 0x1DA2E: 84, - 0x1DA2F: 84, - 0x1DA30: 84, - 0x1DA31: 84, - 0x1DA32: 84, - 0x1DA33: 84, - 0x1DA34: 84, - 0x1DA35: 84, - 0x1DA36: 84, - 0x1DA3B: 84, - 0x1DA3C: 84, - 0x1DA3D: 84, - 0x1DA3E: 84, - 0x1DA3F: 84, - 0x1DA40: 84, - 0x1DA41: 84, - 0x1DA42: 84, - 0x1DA43: 84, - 
0x1DA44: 84, - 0x1DA45: 84, - 0x1DA46: 84, - 0x1DA47: 84, - 0x1DA48: 84, - 0x1DA49: 84, - 0x1DA4A: 84, - 0x1DA4B: 84, - 0x1DA4C: 84, - 0x1DA4D: 84, - 0x1DA4E: 84, - 0x1DA4F: 84, - 0x1DA50: 84, - 0x1DA51: 84, - 0x1DA52: 84, - 0x1DA53: 84, - 0x1DA54: 84, - 0x1DA55: 84, - 0x1DA56: 84, - 0x1DA57: 84, - 0x1DA58: 84, - 0x1DA59: 84, - 0x1DA5A: 84, - 0x1DA5B: 84, - 0x1DA5C: 84, - 0x1DA5D: 84, - 0x1DA5E: 84, - 0x1DA5F: 84, - 0x1DA60: 84, - 0x1DA61: 84, - 0x1DA62: 84, - 0x1DA63: 84, - 0x1DA64: 84, - 0x1DA65: 84, - 0x1DA66: 84, - 0x1DA67: 84, - 0x1DA68: 84, - 0x1DA69: 84, - 0x1DA6A: 84, - 0x1DA6B: 84, - 0x1DA6C: 84, - 0x1DA75: 84, - 0x1DA84: 84, - 0x1DA9B: 84, - 0x1DA9C: 84, - 0x1DA9D: 84, - 0x1DA9E: 84, - 0x1DA9F: 84, - 0x1DAA1: 84, - 0x1DAA2: 84, - 0x1DAA3: 84, - 0x1DAA4: 84, - 0x1DAA5: 84, - 0x1DAA6: 84, - 0x1DAA7: 84, - 0x1DAA8: 84, - 0x1DAA9: 84, - 0x1DAAA: 84, - 0x1DAAB: 84, - 0x1DAAC: 84, - 0x1DAAD: 84, - 0x1DAAE: 84, - 0x1DAAF: 84, - 0x1E000: 84, - 0x1E001: 84, - 0x1E002: 84, - 0x1E003: 84, - 0x1E004: 84, - 0x1E005: 84, - 0x1E006: 84, - 0x1E008: 84, - 0x1E009: 84, - 0x1E00A: 84, - 0x1E00B: 84, - 0x1E00C: 84, - 0x1E00D: 84, - 0x1E00E: 84, - 0x1E00F: 84, - 0x1E010: 84, - 0x1E011: 84, - 0x1E012: 84, - 0x1E013: 84, - 0x1E014: 84, - 0x1E015: 84, - 0x1E016: 84, - 0x1E017: 84, - 0x1E018: 84, - 0x1E01B: 84, - 0x1E01C: 84, - 0x1E01D: 84, - 0x1E01E: 84, - 0x1E01F: 84, - 0x1E020: 84, - 0x1E021: 84, - 0x1E023: 84, - 0x1E024: 84, - 0x1E026: 84, - 0x1E027: 84, - 0x1E028: 84, - 0x1E029: 84, - 0x1E02A: 84, - 0x1E08F: 84, - 0x1E130: 84, - 0x1E131: 84, - 0x1E132: 84, - 0x1E133: 84, - 0x1E134: 84, - 0x1E135: 84, - 0x1E136: 84, - 0x1E2AE: 84, - 0x1E2EC: 84, - 0x1E2ED: 84, - 0x1E2EE: 84, - 0x1E2EF: 84, - 0x1E4EC: 84, - 0x1E4ED: 84, - 0x1E4EE: 84, - 0x1E4EF: 84, - 0x1E8D0: 84, - 0x1E8D1: 84, - 0x1E8D2: 84, - 0x1E8D3: 84, - 0x1E8D4: 84, - 0x1E8D5: 84, - 0x1E8D6: 84, - 0x1E900: 68, - 0x1E901: 68, - 0x1E902: 68, - 0x1E903: 68, - 0x1E904: 68, - 0x1E905: 68, - 0x1E906: 68, - 0x1E907: 68, - 0x1E908: 68, - 0x1E909: 68, - 0x1E90A: 68, - 0x1E90B: 68, - 0x1E90C: 68, - 0x1E90D: 68, - 0x1E90E: 68, - 0x1E90F: 68, - 0x1E910: 68, - 0x1E911: 68, - 0x1E912: 68, - 0x1E913: 68, - 0x1E914: 68, - 0x1E915: 68, - 0x1E916: 68, - 0x1E917: 68, - 0x1E918: 68, - 0x1E919: 68, - 0x1E91A: 68, - 0x1E91B: 68, - 0x1E91C: 68, - 0x1E91D: 68, - 0x1E91E: 68, - 0x1E91F: 68, - 0x1E920: 68, - 0x1E921: 68, - 0x1E922: 68, - 0x1E923: 68, - 0x1E924: 68, - 0x1E925: 68, - 0x1E926: 68, - 0x1E927: 68, - 0x1E928: 68, - 0x1E929: 68, - 0x1E92A: 68, - 0x1E92B: 68, - 0x1E92C: 68, - 0x1E92D: 68, - 0x1E92E: 68, - 0x1E92F: 68, - 0x1E930: 68, - 0x1E931: 68, - 0x1E932: 68, - 0x1E933: 68, - 0x1E934: 68, - 0x1E935: 68, - 0x1E936: 68, - 0x1E937: 68, - 0x1E938: 68, - 0x1E939: 68, - 0x1E93A: 68, - 0x1E93B: 68, - 0x1E93C: 68, - 0x1E93D: 68, - 0x1E93E: 68, - 0x1E93F: 68, - 0x1E940: 68, - 0x1E941: 68, - 0x1E942: 68, - 0x1E943: 68, - 0x1E944: 84, - 0x1E945: 84, - 0x1E946: 84, - 0x1E947: 84, - 0x1E948: 84, - 0x1E949: 84, - 0x1E94A: 84, - 0x1E94B: 84, - 0xE0001: 84, - 0xE0020: 84, - 0xE0021: 84, - 0xE0022: 84, - 0xE0023: 84, - 0xE0024: 84, - 0xE0025: 84, - 0xE0026: 84, - 0xE0027: 84, - 0xE0028: 84, - 0xE0029: 84, - 0xE002A: 84, - 0xE002B: 84, - 0xE002C: 84, - 0xE002D: 84, - 0xE002E: 84, - 0xE002F: 84, - 0xE0030: 84, - 0xE0031: 84, - 0xE0032: 84, - 0xE0033: 84, - 0xE0034: 84, - 0xE0035: 84, - 0xE0036: 84, - 0xE0037: 84, - 0xE0038: 84, - 0xE0039: 84, - 0xE003A: 84, - 0xE003B: 84, - 0xE003C: 84, - 0xE003D: 84, - 0xE003E: 84, - 0xE003F: 84, - 0xE0040: 84, - 0xE0041: 84, - 0xE0042: 84, - 
0xE0043: 84, - 0xE0044: 84, - 0xE0045: 84, - 0xE0046: 84, - 0xE0047: 84, - 0xE0048: 84, - 0xE0049: 84, - 0xE004A: 84, - 0xE004B: 84, - 0xE004C: 84, - 0xE004D: 84, - 0xE004E: 84, - 0xE004F: 84, - 0xE0050: 84, - 0xE0051: 84, - 0xE0052: 84, - 0xE0053: 84, - 0xE0054: 84, - 0xE0055: 84, - 0xE0056: 84, - 0xE0057: 84, - 0xE0058: 84, - 0xE0059: 84, - 0xE005A: 84, - 0xE005B: 84, - 0xE005C: 84, - 0xE005D: 84, - 0xE005E: 84, - 0xE005F: 84, - 0xE0060: 84, - 0xE0061: 84, - 0xE0062: 84, - 0xE0063: 84, - 0xE0064: 84, - 0xE0065: 84, - 0xE0066: 84, - 0xE0067: 84, - 0xE0068: 84, - 0xE0069: 84, - 0xE006A: 84, - 0xE006B: 84, - 0xE006C: 84, - 0xE006D: 84, - 0xE006E: 84, - 0xE006F: 84, - 0xE0070: 84, - 0xE0071: 84, - 0xE0072: 84, - 0xE0073: 84, - 0xE0074: 84, - 0xE0075: 84, - 0xE0076: 84, - 0xE0077: 84, - 0xE0078: 84, - 0xE0079: 84, - 0xE007A: 84, - 0xE007B: 84, - 0xE007C: 84, - 0xE007D: 84, - 0xE007E: 84, - 0xE007F: 84, - 0xE0100: 84, - 0xE0101: 84, - 0xE0102: 84, - 0xE0103: 84, - 0xE0104: 84, - 0xE0105: 84, - 0xE0106: 84, - 0xE0107: 84, - 0xE0108: 84, - 0xE0109: 84, - 0xE010A: 84, - 0xE010B: 84, - 0xE010C: 84, - 0xE010D: 84, - 0xE010E: 84, - 0xE010F: 84, - 0xE0110: 84, - 0xE0111: 84, - 0xE0112: 84, - 0xE0113: 84, - 0xE0114: 84, - 0xE0115: 84, - 0xE0116: 84, - 0xE0117: 84, - 0xE0118: 84, - 0xE0119: 84, - 0xE011A: 84, - 0xE011B: 84, - 0xE011C: 84, - 0xE011D: 84, - 0xE011E: 84, - 0xE011F: 84, - 0xE0120: 84, - 0xE0121: 84, - 0xE0122: 84, - 0xE0123: 84, - 0xE0124: 84, - 0xE0125: 84, - 0xE0126: 84, - 0xE0127: 84, - 0xE0128: 84, - 0xE0129: 84, - 0xE012A: 84, - 0xE012B: 84, - 0xE012C: 84, - 0xE012D: 84, - 0xE012E: 84, - 0xE012F: 84, - 0xE0130: 84, - 0xE0131: 84, - 0xE0132: 84, - 0xE0133: 84, - 0xE0134: 84, - 0xE0135: 84, - 0xE0136: 84, - 0xE0137: 84, - 0xE0138: 84, - 0xE0139: 84, - 0xE013A: 84, - 0xE013B: 84, - 0xE013C: 84, - 0xE013D: 84, - 0xE013E: 84, - 0xE013F: 84, - 0xE0140: 84, - 0xE0141: 84, - 0xE0142: 84, - 0xE0143: 84, - 0xE0144: 84, - 0xE0145: 84, - 0xE0146: 84, - 0xE0147: 84, - 0xE0148: 84, - 0xE0149: 84, - 0xE014A: 84, - 0xE014B: 84, - 0xE014C: 84, - 0xE014D: 84, - 0xE014E: 84, - 0xE014F: 84, - 0xE0150: 84, - 0xE0151: 84, - 0xE0152: 84, - 0xE0153: 84, - 0xE0154: 84, - 0xE0155: 84, - 0xE0156: 84, - 0xE0157: 84, - 0xE0158: 84, - 0xE0159: 84, - 0xE015A: 84, - 0xE015B: 84, - 0xE015C: 84, - 0xE015D: 84, - 0xE015E: 84, - 0xE015F: 84, - 0xE0160: 84, - 0xE0161: 84, - 0xE0162: 84, - 0xE0163: 84, - 0xE0164: 84, - 0xE0165: 84, - 0xE0166: 84, - 0xE0167: 84, - 0xE0168: 84, - 0xE0169: 84, - 0xE016A: 84, - 0xE016B: 84, - 0xE016C: 84, - 0xE016D: 84, - 0xE016E: 84, - 0xE016F: 84, - 0xE0170: 84, - 0xE0171: 84, - 0xE0172: 84, - 0xE0173: 84, - 0xE0174: 84, - 0xE0175: 84, - 0xE0176: 84, - 0xE0177: 84, - 0xE0178: 84, - 0xE0179: 84, - 0xE017A: 84, - 0xE017B: 84, - 0xE017C: 84, - 0xE017D: 84, - 0xE017E: 84, - 0xE017F: 84, - 0xE0180: 84, - 0xE0181: 84, - 0xE0182: 84, - 0xE0183: 84, - 0xE0184: 84, - 0xE0185: 84, - 0xE0186: 84, - 0xE0187: 84, - 0xE0188: 84, - 0xE0189: 84, - 0xE018A: 84, - 0xE018B: 84, - 0xE018C: 84, - 0xE018D: 84, - 0xE018E: 84, - 0xE018F: 84, - 0xE0190: 84, - 0xE0191: 84, - 0xE0192: 84, - 0xE0193: 84, - 0xE0194: 84, - 0xE0195: 84, - 0xE0196: 84, - 0xE0197: 84, - 0xE0198: 84, - 0xE0199: 84, - 0xE019A: 84, - 0xE019B: 84, - 0xE019C: 84, - 0xE019D: 84, - 0xE019E: 84, - 0xE019F: 84, - 0xE01A0: 84, - 0xE01A1: 84, - 0xE01A2: 84, - 0xE01A3: 84, - 0xE01A4: 84, - 0xE01A5: 84, - 0xE01A6: 84, - 0xE01A7: 84, - 0xE01A8: 84, - 0xE01A9: 84, - 0xE01AA: 84, - 0xE01AB: 84, - 0xE01AC: 84, - 0xE01AD: 84, - 0xE01AE: 84, - 0xE01AF: 84, - 
0xE01B0: 84, - 0xE01B1: 84, - 0xE01B2: 84, - 0xE01B3: 84, - 0xE01B4: 84, - 0xE01B5: 84, - 0xE01B6: 84, - 0xE01B7: 84, - 0xE01B8: 84, - 0xE01B9: 84, - 0xE01BA: 84, - 0xE01BB: 84, - 0xE01BC: 84, - 0xE01BD: 84, - 0xE01BE: 84, - 0xE01BF: 84, - 0xE01C0: 84, - 0xE01C1: 84, - 0xE01C2: 84, - 0xE01C3: 84, - 0xE01C4: 84, - 0xE01C5: 84, - 0xE01C6: 84, - 0xE01C7: 84, - 0xE01C8: 84, - 0xE01C9: 84, - 0xE01CA: 84, - 0xE01CB: 84, - 0xE01CC: 84, - 0xE01CD: 84, - 0xE01CE: 84, - 0xE01CF: 84, - 0xE01D0: 84, - 0xE01D1: 84, - 0xE01D2: 84, - 0xE01D3: 84, - 0xE01D4: 84, - 0xE01D5: 84, - 0xE01D6: 84, - 0xE01D7: 84, - 0xE01D8: 84, - 0xE01D9: 84, - 0xE01DA: 84, - 0xE01DB: 84, - 0xE01DC: 84, - 0xE01DD: 84, - 0xE01DE: 84, - 0xE01DF: 84, - 0xE01E0: 84, - 0xE01E1: 84, - 0xE01E2: 84, - 0xE01E3: 84, - 0xE01E4: 84, - 0xE01E5: 84, - 0xE01E6: 84, - 0xE01E7: 84, - 0xE01E8: 84, - 0xE01E9: 84, - 0xE01EA: 84, - 0xE01EB: 84, - 0xE01EC: 84, - 0xE01ED: 84, - 0xE01EE: 84, - 0xE01EF: 84, -} -codepoint_classes = { - "PVALID": ( - 0x2D0000002E, - 0x300000003A, - 0x610000007B, - 0xDF000000F7, - 0xF800000100, - 0x10100000102, - 0x10300000104, - 0x10500000106, - 0x10700000108, - 0x1090000010A, - 0x10B0000010C, - 0x10D0000010E, - 0x10F00000110, - 0x11100000112, - 0x11300000114, - 0x11500000116, - 0x11700000118, - 0x1190000011A, - 0x11B0000011C, - 0x11D0000011E, - 0x11F00000120, - 0x12100000122, - 0x12300000124, - 0x12500000126, - 0x12700000128, - 0x1290000012A, - 0x12B0000012C, - 0x12D0000012E, - 0x12F00000130, - 0x13100000132, - 0x13500000136, - 0x13700000139, - 0x13A0000013B, - 0x13C0000013D, - 0x13E0000013F, - 0x14200000143, - 0x14400000145, - 0x14600000147, - 0x14800000149, - 0x14B0000014C, - 0x14D0000014E, - 0x14F00000150, - 0x15100000152, - 0x15300000154, - 0x15500000156, - 0x15700000158, - 0x1590000015A, - 0x15B0000015C, - 0x15D0000015E, - 0x15F00000160, - 0x16100000162, - 0x16300000164, - 0x16500000166, - 0x16700000168, - 0x1690000016A, - 0x16B0000016C, - 0x16D0000016E, - 0x16F00000170, - 0x17100000172, - 0x17300000174, - 0x17500000176, - 0x17700000178, - 0x17A0000017B, - 0x17C0000017D, - 0x17E0000017F, - 0x18000000181, - 0x18300000184, - 0x18500000186, - 0x18800000189, - 0x18C0000018E, - 0x19200000193, - 0x19500000196, - 0x1990000019C, - 0x19E0000019F, - 0x1A1000001A2, - 0x1A3000001A4, - 0x1A5000001A6, - 0x1A8000001A9, - 0x1AA000001AC, - 0x1AD000001AE, - 0x1B0000001B1, - 0x1B4000001B5, - 0x1B6000001B7, - 0x1B9000001BC, - 0x1BD000001C4, - 0x1CE000001CF, - 0x1D0000001D1, - 0x1D2000001D3, - 0x1D4000001D5, - 0x1D6000001D7, - 0x1D8000001D9, - 0x1DA000001DB, - 0x1DC000001DE, - 0x1DF000001E0, - 0x1E1000001E2, - 0x1E3000001E4, - 0x1E5000001E6, - 0x1E7000001E8, - 0x1E9000001EA, - 0x1EB000001EC, - 0x1ED000001EE, - 0x1EF000001F1, - 0x1F5000001F6, - 0x1F9000001FA, - 0x1FB000001FC, - 0x1FD000001FE, - 0x1FF00000200, - 0x20100000202, - 0x20300000204, - 0x20500000206, - 0x20700000208, - 0x2090000020A, - 0x20B0000020C, - 0x20D0000020E, - 0x20F00000210, - 0x21100000212, - 0x21300000214, - 0x21500000216, - 0x21700000218, - 0x2190000021A, - 0x21B0000021C, - 0x21D0000021E, - 0x21F00000220, - 0x22100000222, - 0x22300000224, - 0x22500000226, - 0x22700000228, - 0x2290000022A, - 0x22B0000022C, - 0x22D0000022E, - 0x22F00000230, - 0x23100000232, - 0x2330000023A, - 0x23C0000023D, - 0x23F00000241, - 0x24200000243, - 0x24700000248, - 0x2490000024A, - 0x24B0000024C, - 0x24D0000024E, - 0x24F000002B0, - 0x2B9000002C2, - 0x2C6000002D2, - 0x2EC000002ED, - 0x2EE000002EF, - 0x30000000340, - 0x34200000343, - 0x3460000034F, - 0x35000000370, - 0x37100000372, - 
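A note on the `joining_types` table that closes just above: its integer values are the ASCII ordinals of the RFC 5893 joining-type letters, so 68 is "D" (dual-joining), 82 is "R" (right-joining), 84 is "T" (transparent), 67 is "C" (join-causing) and 76 is "L" (left-joining); codepoints absent from the table are simply non-joining. A minimal sketch, not part of the deleted file, with a stub dict holding a few entries copied from the table above:

# Sketch only: decode joining_types values back to their RFC 5893 letters.
# The stub below stands in for the full deleted table.
joining_types = {0x710: 82, 0x712: 68, 0x730: 84, 0x7FA: 67, 0xA872: 76}

for cp, jt in sorted(joining_types.items()):
    # chr() recovers the letter that the ordinal encodes
    print(f"U+{cp:04X} -> {chr(jt)}")  # R, D, T, C, L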
0x37300000374, - 0x37700000378, - 0x37B0000037E, - 0x39000000391, - 0x3AC000003CF, - 0x3D7000003D8, - 0x3D9000003DA, - 0x3DB000003DC, - 0x3DD000003DE, - 0x3DF000003E0, - 0x3E1000003E2, - 0x3E3000003E4, - 0x3E5000003E6, - 0x3E7000003E8, - 0x3E9000003EA, - 0x3EB000003EC, - 0x3ED000003EE, - 0x3EF000003F0, - 0x3F3000003F4, - 0x3F8000003F9, - 0x3FB000003FD, - 0x43000000460, - 0x46100000462, - 0x46300000464, - 0x46500000466, - 0x46700000468, - 0x4690000046A, - 0x46B0000046C, - 0x46D0000046E, - 0x46F00000470, - 0x47100000472, - 0x47300000474, - 0x47500000476, - 0x47700000478, - 0x4790000047A, - 0x47B0000047C, - 0x47D0000047E, - 0x47F00000480, - 0x48100000482, - 0x48300000488, - 0x48B0000048C, - 0x48D0000048E, - 0x48F00000490, - 0x49100000492, - 0x49300000494, - 0x49500000496, - 0x49700000498, - 0x4990000049A, - 0x49B0000049C, - 0x49D0000049E, - 0x49F000004A0, - 0x4A1000004A2, - 0x4A3000004A4, - 0x4A5000004A6, - 0x4A7000004A8, - 0x4A9000004AA, - 0x4AB000004AC, - 0x4AD000004AE, - 0x4AF000004B0, - 0x4B1000004B2, - 0x4B3000004B4, - 0x4B5000004B6, - 0x4B7000004B8, - 0x4B9000004BA, - 0x4BB000004BC, - 0x4BD000004BE, - 0x4BF000004C0, - 0x4C2000004C3, - 0x4C4000004C5, - 0x4C6000004C7, - 0x4C8000004C9, - 0x4CA000004CB, - 0x4CC000004CD, - 0x4CE000004D0, - 0x4D1000004D2, - 0x4D3000004D4, - 0x4D5000004D6, - 0x4D7000004D8, - 0x4D9000004DA, - 0x4DB000004DC, - 0x4DD000004DE, - 0x4DF000004E0, - 0x4E1000004E2, - 0x4E3000004E4, - 0x4E5000004E6, - 0x4E7000004E8, - 0x4E9000004EA, - 0x4EB000004EC, - 0x4ED000004EE, - 0x4EF000004F0, - 0x4F1000004F2, - 0x4F3000004F4, - 0x4F5000004F6, - 0x4F7000004F8, - 0x4F9000004FA, - 0x4FB000004FC, - 0x4FD000004FE, - 0x4FF00000500, - 0x50100000502, - 0x50300000504, - 0x50500000506, - 0x50700000508, - 0x5090000050A, - 0x50B0000050C, - 0x50D0000050E, - 0x50F00000510, - 0x51100000512, - 0x51300000514, - 0x51500000516, - 0x51700000518, - 0x5190000051A, - 0x51B0000051C, - 0x51D0000051E, - 0x51F00000520, - 0x52100000522, - 0x52300000524, - 0x52500000526, - 0x52700000528, - 0x5290000052A, - 0x52B0000052C, - 0x52D0000052E, - 0x52F00000530, - 0x5590000055A, - 0x56000000587, - 0x58800000589, - 0x591000005BE, - 0x5BF000005C0, - 0x5C1000005C3, - 0x5C4000005C6, - 0x5C7000005C8, - 0x5D0000005EB, - 0x5EF000005F3, - 0x6100000061B, - 0x62000000640, - 0x64100000660, - 0x66E00000675, - 0x679000006D4, - 0x6D5000006DD, - 0x6DF000006E9, - 0x6EA000006F0, - 0x6FA00000700, - 0x7100000074B, - 0x74D000007B2, - 0x7C0000007F6, - 0x7FD000007FE, - 0x8000000082E, - 0x8400000085C, - 0x8600000086B, - 0x87000000888, - 0x8890000088F, - 0x898000008E2, - 0x8E300000958, - 0x96000000964, - 0x96600000970, - 0x97100000984, - 0x9850000098D, - 0x98F00000991, - 0x993000009A9, - 0x9AA000009B1, - 0x9B2000009B3, - 0x9B6000009BA, - 0x9BC000009C5, - 0x9C7000009C9, - 0x9CB000009CF, - 0x9D7000009D8, - 0x9E0000009E4, - 0x9E6000009F2, - 0x9FC000009FD, - 0x9FE000009FF, - 0xA0100000A04, - 0xA0500000A0B, - 0xA0F00000A11, - 0xA1300000A29, - 0xA2A00000A31, - 0xA3200000A33, - 0xA3500000A36, - 0xA3800000A3A, - 0xA3C00000A3D, - 0xA3E00000A43, - 0xA4700000A49, - 0xA4B00000A4E, - 0xA5100000A52, - 0xA5C00000A5D, - 0xA6600000A76, - 0xA8100000A84, - 0xA8500000A8E, - 0xA8F00000A92, - 0xA9300000AA9, - 0xAAA00000AB1, - 0xAB200000AB4, - 0xAB500000ABA, - 0xABC00000AC6, - 0xAC700000ACA, - 0xACB00000ACE, - 0xAD000000AD1, - 0xAE000000AE4, - 0xAE600000AF0, - 0xAF900000B00, - 0xB0100000B04, - 0xB0500000B0D, - 0xB0F00000B11, - 0xB1300000B29, - 0xB2A00000B31, - 0xB3200000B34, - 0xB3500000B3A, - 0xB3C00000B45, - 0xB4700000B49, - 0xB4B00000B4E, - 0xB5500000B58, - 
0xB5F00000B64, - 0xB6600000B70, - 0xB7100000B72, - 0xB8200000B84, - 0xB8500000B8B, - 0xB8E00000B91, - 0xB9200000B96, - 0xB9900000B9B, - 0xB9C00000B9D, - 0xB9E00000BA0, - 0xBA300000BA5, - 0xBA800000BAB, - 0xBAE00000BBA, - 0xBBE00000BC3, - 0xBC600000BC9, - 0xBCA00000BCE, - 0xBD000000BD1, - 0xBD700000BD8, - 0xBE600000BF0, - 0xC0000000C0D, - 0xC0E00000C11, - 0xC1200000C29, - 0xC2A00000C3A, - 0xC3C00000C45, - 0xC4600000C49, - 0xC4A00000C4E, - 0xC5500000C57, - 0xC5800000C5B, - 0xC5D00000C5E, - 0xC6000000C64, - 0xC6600000C70, - 0xC8000000C84, - 0xC8500000C8D, - 0xC8E00000C91, - 0xC9200000CA9, - 0xCAA00000CB4, - 0xCB500000CBA, - 0xCBC00000CC5, - 0xCC600000CC9, - 0xCCA00000CCE, - 0xCD500000CD7, - 0xCDD00000CDF, - 0xCE000000CE4, - 0xCE600000CF0, - 0xCF100000CF4, - 0xD0000000D0D, - 0xD0E00000D11, - 0xD1200000D45, - 0xD4600000D49, - 0xD4A00000D4F, - 0xD5400000D58, - 0xD5F00000D64, - 0xD6600000D70, - 0xD7A00000D80, - 0xD8100000D84, - 0xD8500000D97, - 0xD9A00000DB2, - 0xDB300000DBC, - 0xDBD00000DBE, - 0xDC000000DC7, - 0xDCA00000DCB, - 0xDCF00000DD5, - 0xDD600000DD7, - 0xDD800000DE0, - 0xDE600000DF0, - 0xDF200000DF4, - 0xE0100000E33, - 0xE3400000E3B, - 0xE4000000E4F, - 0xE5000000E5A, - 0xE8100000E83, - 0xE8400000E85, - 0xE8600000E8B, - 0xE8C00000EA4, - 0xEA500000EA6, - 0xEA700000EB3, - 0xEB400000EBE, - 0xEC000000EC5, - 0xEC600000EC7, - 0xEC800000ECF, - 0xED000000EDA, - 0xEDE00000EE0, - 0xF0000000F01, - 0xF0B00000F0C, - 0xF1800000F1A, - 0xF2000000F2A, - 0xF3500000F36, - 0xF3700000F38, - 0xF3900000F3A, - 0xF3E00000F43, - 0xF4400000F48, - 0xF4900000F4D, - 0xF4E00000F52, - 0xF5300000F57, - 0xF5800000F5C, - 0xF5D00000F69, - 0xF6A00000F6D, - 0xF7100000F73, - 0xF7400000F75, - 0xF7A00000F81, - 0xF8200000F85, - 0xF8600000F93, - 0xF9400000F98, - 0xF9900000F9D, - 0xF9E00000FA2, - 0xFA300000FA7, - 0xFA800000FAC, - 0xFAD00000FB9, - 0xFBA00000FBD, - 0xFC600000FC7, - 0x10000000104A, - 0x10500000109E, - 0x10D0000010FB, - 0x10FD00001100, - 0x120000001249, - 0x124A0000124E, - 0x125000001257, - 0x125800001259, - 0x125A0000125E, - 0x126000001289, - 0x128A0000128E, - 0x1290000012B1, - 0x12B2000012B6, - 0x12B8000012BF, - 0x12C0000012C1, - 0x12C2000012C6, - 0x12C8000012D7, - 0x12D800001311, - 0x131200001316, - 0x13180000135B, - 0x135D00001360, - 0x138000001390, - 0x13A0000013F6, - 0x14010000166D, - 0x166F00001680, - 0x16810000169B, - 0x16A0000016EB, - 0x16F1000016F9, - 0x170000001716, - 0x171F00001735, - 0x174000001754, - 0x17600000176D, - 0x176E00001771, - 0x177200001774, - 0x1780000017B4, - 0x17B6000017D4, - 0x17D7000017D8, - 0x17DC000017DE, - 0x17E0000017EA, - 0x18100000181A, - 0x182000001879, - 0x1880000018AB, - 0x18B0000018F6, - 0x19000000191F, - 0x19200000192C, - 0x19300000193C, - 0x19460000196E, - 0x197000001975, - 0x1980000019AC, - 0x19B0000019CA, - 0x19D0000019DA, - 0x1A0000001A1C, - 0x1A2000001A5F, - 0x1A6000001A7D, - 0x1A7F00001A8A, - 0x1A9000001A9A, - 0x1AA700001AA8, - 0x1AB000001ABE, - 0x1ABF00001ACF, - 0x1B0000001B4D, - 0x1B5000001B5A, - 0x1B6B00001B74, - 0x1B8000001BF4, - 0x1C0000001C38, - 0x1C4000001C4A, - 0x1C4D00001C7E, - 0x1CD000001CD3, - 0x1CD400001CFB, - 0x1D0000001D2C, - 0x1D2F00001D30, - 0x1D3B00001D3C, - 0x1D4E00001D4F, - 0x1D6B00001D78, - 0x1D7900001D9B, - 0x1DC000001E00, - 0x1E0100001E02, - 0x1E0300001E04, - 0x1E0500001E06, - 0x1E0700001E08, - 0x1E0900001E0A, - 0x1E0B00001E0C, - 0x1E0D00001E0E, - 0x1E0F00001E10, - 0x1E1100001E12, - 0x1E1300001E14, - 0x1E1500001E16, - 0x1E1700001E18, - 0x1E1900001E1A, - 0x1E1B00001E1C, - 0x1E1D00001E1E, - 0x1E1F00001E20, - 0x1E2100001E22, - 0x1E2300001E24, - 
0x1E2500001E26, - 0x1E2700001E28, - 0x1E2900001E2A, - 0x1E2B00001E2C, - 0x1E2D00001E2E, - 0x1E2F00001E30, - 0x1E3100001E32, - 0x1E3300001E34, - 0x1E3500001E36, - 0x1E3700001E38, - 0x1E3900001E3A, - 0x1E3B00001E3C, - 0x1E3D00001E3E, - 0x1E3F00001E40, - 0x1E4100001E42, - 0x1E4300001E44, - 0x1E4500001E46, - 0x1E4700001E48, - 0x1E4900001E4A, - 0x1E4B00001E4C, - 0x1E4D00001E4E, - 0x1E4F00001E50, - 0x1E5100001E52, - 0x1E5300001E54, - 0x1E5500001E56, - 0x1E5700001E58, - 0x1E5900001E5A, - 0x1E5B00001E5C, - 0x1E5D00001E5E, - 0x1E5F00001E60, - 0x1E6100001E62, - 0x1E6300001E64, - 0x1E6500001E66, - 0x1E6700001E68, - 0x1E6900001E6A, - 0x1E6B00001E6C, - 0x1E6D00001E6E, - 0x1E6F00001E70, - 0x1E7100001E72, - 0x1E7300001E74, - 0x1E7500001E76, - 0x1E7700001E78, - 0x1E7900001E7A, - 0x1E7B00001E7C, - 0x1E7D00001E7E, - 0x1E7F00001E80, - 0x1E8100001E82, - 0x1E8300001E84, - 0x1E8500001E86, - 0x1E8700001E88, - 0x1E8900001E8A, - 0x1E8B00001E8C, - 0x1E8D00001E8E, - 0x1E8F00001E90, - 0x1E9100001E92, - 0x1E9300001E94, - 0x1E9500001E9A, - 0x1E9C00001E9E, - 0x1E9F00001EA0, - 0x1EA100001EA2, - 0x1EA300001EA4, - 0x1EA500001EA6, - 0x1EA700001EA8, - 0x1EA900001EAA, - 0x1EAB00001EAC, - 0x1EAD00001EAE, - 0x1EAF00001EB0, - 0x1EB100001EB2, - 0x1EB300001EB4, - 0x1EB500001EB6, - 0x1EB700001EB8, - 0x1EB900001EBA, - 0x1EBB00001EBC, - 0x1EBD00001EBE, - 0x1EBF00001EC0, - 0x1EC100001EC2, - 0x1EC300001EC4, - 0x1EC500001EC6, - 0x1EC700001EC8, - 0x1EC900001ECA, - 0x1ECB00001ECC, - 0x1ECD00001ECE, - 0x1ECF00001ED0, - 0x1ED100001ED2, - 0x1ED300001ED4, - 0x1ED500001ED6, - 0x1ED700001ED8, - 0x1ED900001EDA, - 0x1EDB00001EDC, - 0x1EDD00001EDE, - 0x1EDF00001EE0, - 0x1EE100001EE2, - 0x1EE300001EE4, - 0x1EE500001EE6, - 0x1EE700001EE8, - 0x1EE900001EEA, - 0x1EEB00001EEC, - 0x1EED00001EEE, - 0x1EEF00001EF0, - 0x1EF100001EF2, - 0x1EF300001EF4, - 0x1EF500001EF6, - 0x1EF700001EF8, - 0x1EF900001EFA, - 0x1EFB00001EFC, - 0x1EFD00001EFE, - 0x1EFF00001F08, - 0x1F1000001F16, - 0x1F2000001F28, - 0x1F3000001F38, - 0x1F4000001F46, - 0x1F5000001F58, - 0x1F6000001F68, - 0x1F7000001F71, - 0x1F7200001F73, - 0x1F7400001F75, - 0x1F7600001F77, - 0x1F7800001F79, - 0x1F7A00001F7B, - 0x1F7C00001F7D, - 0x1FB000001FB2, - 0x1FB600001FB7, - 0x1FC600001FC7, - 0x1FD000001FD3, - 0x1FD600001FD8, - 0x1FE000001FE3, - 0x1FE400001FE8, - 0x1FF600001FF7, - 0x214E0000214F, - 0x218400002185, - 0x2C3000002C60, - 0x2C6100002C62, - 0x2C6500002C67, - 0x2C6800002C69, - 0x2C6A00002C6B, - 0x2C6C00002C6D, - 0x2C7100002C72, - 0x2C7300002C75, - 0x2C7600002C7C, - 0x2C8100002C82, - 0x2C8300002C84, - 0x2C8500002C86, - 0x2C8700002C88, - 0x2C8900002C8A, - 0x2C8B00002C8C, - 0x2C8D00002C8E, - 0x2C8F00002C90, - 0x2C9100002C92, - 0x2C9300002C94, - 0x2C9500002C96, - 0x2C9700002C98, - 0x2C9900002C9A, - 0x2C9B00002C9C, - 0x2C9D00002C9E, - 0x2C9F00002CA0, - 0x2CA100002CA2, - 0x2CA300002CA4, - 0x2CA500002CA6, - 0x2CA700002CA8, - 0x2CA900002CAA, - 0x2CAB00002CAC, - 0x2CAD00002CAE, - 0x2CAF00002CB0, - 0x2CB100002CB2, - 0x2CB300002CB4, - 0x2CB500002CB6, - 0x2CB700002CB8, - 0x2CB900002CBA, - 0x2CBB00002CBC, - 0x2CBD00002CBE, - 0x2CBF00002CC0, - 0x2CC100002CC2, - 0x2CC300002CC4, - 0x2CC500002CC6, - 0x2CC700002CC8, - 0x2CC900002CCA, - 0x2CCB00002CCC, - 0x2CCD00002CCE, - 0x2CCF00002CD0, - 0x2CD100002CD2, - 0x2CD300002CD4, - 0x2CD500002CD6, - 0x2CD700002CD8, - 0x2CD900002CDA, - 0x2CDB00002CDC, - 0x2CDD00002CDE, - 0x2CDF00002CE0, - 0x2CE100002CE2, - 0x2CE300002CE5, - 0x2CEC00002CED, - 0x2CEE00002CF2, - 0x2CF300002CF4, - 0x2D0000002D26, - 0x2D2700002D28, - 0x2D2D00002D2E, - 0x2D3000002D68, - 0x2D7F00002D97, - 
0x2DA000002DA7, - 0x2DA800002DAF, - 0x2DB000002DB7, - 0x2DB800002DBF, - 0x2DC000002DC7, - 0x2DC800002DCF, - 0x2DD000002DD7, - 0x2DD800002DDF, - 0x2DE000002E00, - 0x2E2F00002E30, - 0x300500003008, - 0x302A0000302E, - 0x303C0000303D, - 0x304100003097, - 0x30990000309B, - 0x309D0000309F, - 0x30A1000030FB, - 0x30FC000030FF, - 0x310500003130, - 0x31A0000031C0, - 0x31F000003200, - 0x340000004DC0, - 0x4E000000A48D, - 0xA4D00000A4FE, - 0xA5000000A60D, - 0xA6100000A62C, - 0xA6410000A642, - 0xA6430000A644, - 0xA6450000A646, - 0xA6470000A648, - 0xA6490000A64A, - 0xA64B0000A64C, - 0xA64D0000A64E, - 0xA64F0000A650, - 0xA6510000A652, - 0xA6530000A654, - 0xA6550000A656, - 0xA6570000A658, - 0xA6590000A65A, - 0xA65B0000A65C, - 0xA65D0000A65E, - 0xA65F0000A660, - 0xA6610000A662, - 0xA6630000A664, - 0xA6650000A666, - 0xA6670000A668, - 0xA6690000A66A, - 0xA66B0000A66C, - 0xA66D0000A670, - 0xA6740000A67E, - 0xA67F0000A680, - 0xA6810000A682, - 0xA6830000A684, - 0xA6850000A686, - 0xA6870000A688, - 0xA6890000A68A, - 0xA68B0000A68C, - 0xA68D0000A68E, - 0xA68F0000A690, - 0xA6910000A692, - 0xA6930000A694, - 0xA6950000A696, - 0xA6970000A698, - 0xA6990000A69A, - 0xA69B0000A69C, - 0xA69E0000A6E6, - 0xA6F00000A6F2, - 0xA7170000A720, - 0xA7230000A724, - 0xA7250000A726, - 0xA7270000A728, - 0xA7290000A72A, - 0xA72B0000A72C, - 0xA72D0000A72E, - 0xA72F0000A732, - 0xA7330000A734, - 0xA7350000A736, - 0xA7370000A738, - 0xA7390000A73A, - 0xA73B0000A73C, - 0xA73D0000A73E, - 0xA73F0000A740, - 0xA7410000A742, - 0xA7430000A744, - 0xA7450000A746, - 0xA7470000A748, - 0xA7490000A74A, - 0xA74B0000A74C, - 0xA74D0000A74E, - 0xA74F0000A750, - 0xA7510000A752, - 0xA7530000A754, - 0xA7550000A756, - 0xA7570000A758, - 0xA7590000A75A, - 0xA75B0000A75C, - 0xA75D0000A75E, - 0xA75F0000A760, - 0xA7610000A762, - 0xA7630000A764, - 0xA7650000A766, - 0xA7670000A768, - 0xA7690000A76A, - 0xA76B0000A76C, - 0xA76D0000A76E, - 0xA76F0000A770, - 0xA7710000A779, - 0xA77A0000A77B, - 0xA77C0000A77D, - 0xA77F0000A780, - 0xA7810000A782, - 0xA7830000A784, - 0xA7850000A786, - 0xA7870000A789, - 0xA78C0000A78D, - 0xA78E0000A790, - 0xA7910000A792, - 0xA7930000A796, - 0xA7970000A798, - 0xA7990000A79A, - 0xA79B0000A79C, - 0xA79D0000A79E, - 0xA79F0000A7A0, - 0xA7A10000A7A2, - 0xA7A30000A7A4, - 0xA7A50000A7A6, - 0xA7A70000A7A8, - 0xA7A90000A7AA, - 0xA7AF0000A7B0, - 0xA7B50000A7B6, - 0xA7B70000A7B8, - 0xA7B90000A7BA, - 0xA7BB0000A7BC, - 0xA7BD0000A7BE, - 0xA7BF0000A7C0, - 0xA7C10000A7C2, - 0xA7C30000A7C4, - 0xA7C80000A7C9, - 0xA7CA0000A7CB, - 0xA7D10000A7D2, - 0xA7D30000A7D4, - 0xA7D50000A7D6, - 0xA7D70000A7D8, - 0xA7D90000A7DA, - 0xA7F60000A7F8, - 0xA7FA0000A828, - 0xA82C0000A82D, - 0xA8400000A874, - 0xA8800000A8C6, - 0xA8D00000A8DA, - 0xA8E00000A8F8, - 0xA8FB0000A8FC, - 0xA8FD0000A92E, - 0xA9300000A954, - 0xA9800000A9C1, - 0xA9CF0000A9DA, - 0xA9E00000A9FF, - 0xAA000000AA37, - 0xAA400000AA4E, - 0xAA500000AA5A, - 0xAA600000AA77, - 0xAA7A0000AAC3, - 0xAADB0000AADE, - 0xAAE00000AAF0, - 0xAAF20000AAF7, - 0xAB010000AB07, - 0xAB090000AB0F, - 0xAB110000AB17, - 0xAB200000AB27, - 0xAB280000AB2F, - 0xAB300000AB5B, - 0xAB600000AB69, - 0xABC00000ABEB, - 0xABEC0000ABEE, - 0xABF00000ABFA, - 0xAC000000D7A4, - 0xFA0E0000FA10, - 0xFA110000FA12, - 0xFA130000FA15, - 0xFA1F0000FA20, - 0xFA210000FA22, - 0xFA230000FA25, - 0xFA270000FA2A, - 0xFB1E0000FB1F, - 0xFE200000FE30, - 0xFE730000FE74, - 0x100000001000C, - 0x1000D00010027, - 0x100280001003B, - 0x1003C0001003E, - 0x1003F0001004E, - 0x100500001005E, - 0x10080000100FB, - 0x101FD000101FE, - 0x102800001029D, - 0x102A0000102D1, - 0x102E0000102E1, 
- 0x1030000010320, - 0x1032D00010341, - 0x103420001034A, - 0x103500001037B, - 0x103800001039E, - 0x103A0000103C4, - 0x103C8000103D0, - 0x104280001049E, - 0x104A0000104AA, - 0x104D8000104FC, - 0x1050000010528, - 0x1053000010564, - 0x10597000105A2, - 0x105A3000105B2, - 0x105B3000105BA, - 0x105BB000105BD, - 0x1060000010737, - 0x1074000010756, - 0x1076000010768, - 0x1078000010781, - 0x1080000010806, - 0x1080800010809, - 0x1080A00010836, - 0x1083700010839, - 0x1083C0001083D, - 0x1083F00010856, - 0x1086000010877, - 0x108800001089F, - 0x108E0000108F3, - 0x108F4000108F6, - 0x1090000010916, - 0x109200001093A, - 0x10980000109B8, - 0x109BE000109C0, - 0x10A0000010A04, - 0x10A0500010A07, - 0x10A0C00010A14, - 0x10A1500010A18, - 0x10A1900010A36, - 0x10A3800010A3B, - 0x10A3F00010A40, - 0x10A6000010A7D, - 0x10A8000010A9D, - 0x10AC000010AC8, - 0x10AC900010AE7, - 0x10B0000010B36, - 0x10B4000010B56, - 0x10B6000010B73, - 0x10B8000010B92, - 0x10C0000010C49, - 0x10CC000010CF3, - 0x10D0000010D28, - 0x10D3000010D3A, - 0x10E8000010EAA, - 0x10EAB00010EAD, - 0x10EB000010EB2, - 0x10EFD00010F1D, - 0x10F2700010F28, - 0x10F3000010F51, - 0x10F7000010F86, - 0x10FB000010FC5, - 0x10FE000010FF7, - 0x1100000011047, - 0x1106600011076, - 0x1107F000110BB, - 0x110C2000110C3, - 0x110D0000110E9, - 0x110F0000110FA, - 0x1110000011135, - 0x1113600011140, - 0x1114400011148, - 0x1115000011174, - 0x1117600011177, - 0x11180000111C5, - 0x111C9000111CD, - 0x111CE000111DB, - 0x111DC000111DD, - 0x1120000011212, - 0x1121300011238, - 0x1123E00011242, - 0x1128000011287, - 0x1128800011289, - 0x1128A0001128E, - 0x1128F0001129E, - 0x1129F000112A9, - 0x112B0000112EB, - 0x112F0000112FA, - 0x1130000011304, - 0x113050001130D, - 0x1130F00011311, - 0x1131300011329, - 0x1132A00011331, - 0x1133200011334, - 0x113350001133A, - 0x1133B00011345, - 0x1134700011349, - 0x1134B0001134E, - 0x1135000011351, - 0x1135700011358, - 0x1135D00011364, - 0x113660001136D, - 0x1137000011375, - 0x114000001144B, - 0x114500001145A, - 0x1145E00011462, - 0x11480000114C6, - 0x114C7000114C8, - 0x114D0000114DA, - 0x11580000115B6, - 0x115B8000115C1, - 0x115D8000115DE, - 0x1160000011641, - 0x1164400011645, - 0x116500001165A, - 0x11680000116B9, - 0x116C0000116CA, - 0x117000001171B, - 0x1171D0001172C, - 0x117300001173A, - 0x1174000011747, - 0x118000001183B, - 0x118C0000118EA, - 0x118FF00011907, - 0x119090001190A, - 0x1190C00011914, - 0x1191500011917, - 0x1191800011936, - 0x1193700011939, - 0x1193B00011944, - 0x119500001195A, - 0x119A0000119A8, - 0x119AA000119D8, - 0x119DA000119E2, - 0x119E3000119E5, - 0x11A0000011A3F, - 0x11A4700011A48, - 0x11A5000011A9A, - 0x11A9D00011A9E, - 0x11AB000011AF9, - 0x11C0000011C09, - 0x11C0A00011C37, - 0x11C3800011C41, - 0x11C5000011C5A, - 0x11C7200011C90, - 0x11C9200011CA8, - 0x11CA900011CB7, - 0x11D0000011D07, - 0x11D0800011D0A, - 0x11D0B00011D37, - 0x11D3A00011D3B, - 0x11D3C00011D3E, - 0x11D3F00011D48, - 0x11D5000011D5A, - 0x11D6000011D66, - 0x11D6700011D69, - 0x11D6A00011D8F, - 0x11D9000011D92, - 0x11D9300011D99, - 0x11DA000011DAA, - 0x11EE000011EF7, - 0x11F0000011F11, - 0x11F1200011F3B, - 0x11F3E00011F43, - 0x11F5000011F5A, - 0x11FB000011FB1, - 0x120000001239A, - 0x1248000012544, - 0x12F9000012FF1, - 0x1300000013430, - 0x1344000013456, - 0x1440000014647, - 0x1680000016A39, - 0x16A4000016A5F, - 0x16A6000016A6A, - 0x16A7000016ABF, - 0x16AC000016ACA, - 0x16AD000016AEE, - 0x16AF000016AF5, - 0x16B0000016B37, - 0x16B4000016B44, - 0x16B5000016B5A, - 0x16B6300016B78, - 0x16B7D00016B90, - 0x16E6000016E80, - 0x16F0000016F4B, - 0x16F4F00016F88, - 0x16F8F00016FA0, - 
0x16FE000016FE2, - 0x16FE300016FE5, - 0x16FF000016FF2, - 0x17000000187F8, - 0x1880000018CD6, - 0x18D0000018D09, - 0x1AFF00001AFF4, - 0x1AFF50001AFFC, - 0x1AFFD0001AFFF, - 0x1B0000001B123, - 0x1B1320001B133, - 0x1B1500001B153, - 0x1B1550001B156, - 0x1B1640001B168, - 0x1B1700001B2FC, - 0x1BC000001BC6B, - 0x1BC700001BC7D, - 0x1BC800001BC89, - 0x1BC900001BC9A, - 0x1BC9D0001BC9F, - 0x1CF000001CF2E, - 0x1CF300001CF47, - 0x1DA000001DA37, - 0x1DA3B0001DA6D, - 0x1DA750001DA76, - 0x1DA840001DA85, - 0x1DA9B0001DAA0, - 0x1DAA10001DAB0, - 0x1DF000001DF1F, - 0x1DF250001DF2B, - 0x1E0000001E007, - 0x1E0080001E019, - 0x1E01B0001E022, - 0x1E0230001E025, - 0x1E0260001E02B, - 0x1E08F0001E090, - 0x1E1000001E12D, - 0x1E1300001E13E, - 0x1E1400001E14A, - 0x1E14E0001E14F, - 0x1E2900001E2AF, - 0x1E2C00001E2FA, - 0x1E4D00001E4FA, - 0x1E7E00001E7E7, - 0x1E7E80001E7EC, - 0x1E7ED0001E7EF, - 0x1E7F00001E7FF, - 0x1E8000001E8C5, - 0x1E8D00001E8D7, - 0x1E9220001E94C, - 0x1E9500001E95A, - 0x200000002A6E0, - 0x2A7000002B73A, - 0x2B7400002B81E, - 0x2B8200002CEA2, - 0x2CEB00002EBE1, - 0x2EBF00002EE5E, - 0x300000003134B, - 0x31350000323B0, - ), - "CONTEXTJ": (0x200C0000200E,), - "CONTEXTO": ( - 0xB7000000B8, - 0x37500000376, - 0x5F3000005F5, - 0x6600000066A, - 0x6F0000006FA, - 0x30FB000030FC, - ), -} diff --git a/myenv/Lib/site-packages/idna/intranges.py b/myenv/Lib/site-packages/idna/intranges.py deleted file mode 100644 index 7bfaa8d..0000000 --- a/myenv/Lib/site-packages/idna/intranges.py +++ /dev/null @@ -1,57 +0,0 @@ -""" -Given a list of integers, made up of (hopefully) a small number of long runs -of consecutive integers, compute a representation of the form -((start1, end1), (start2, end2) ...). Then answer the question "was x present -in the original list?" in time O(log(# runs)). -""" - -import bisect -from typing import List, Tuple - - -def intranges_from_list(list_: List[int]) -> Tuple[int, ...]: - """Represent a list of integers as a sequence of ranges: - ((start_0, end_0), (start_1, end_1), ...), such that the original - integers are exactly those x such that start_i <= x < end_i for some i. - - Ranges are encoded as single integers (start << 32 | end), not as tuples. 
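That encoding is worth spelling out, because the `codepoint_classes` tuples above and the helpers in this file both depend on it: each half-open range [start, end) is packed into a single integer as (start << 32) | end, so the first PVALID entry 0x2D0000002E unpacks to (0x2D, 0x2E), i.e. just the hyphen, and 0x610000007B covers "a" through "z". A minimal self-contained sketch of the decode-and-search idea, mirroring the `_decode_range` and `intranges_contain` helpers that follow (names redefined here purely for illustration):

import bisect
from typing import Tuple

def decode_range(r: int) -> Tuple[int, int]:
    # Unpack (start << 32) | end back into (start, end).
    return (r >> 32), (r & ((1 << 32) - 1))

# First three PVALID entries from the table above: '-', '0'-'9', 'a'-'z'.
PVALID_SAMPLE = (0x2D0000002E, 0x300000003A, 0x610000007B)

def contains(cp: int, ranges: Tuple[int, ...]) -> bool:
    probe = cp << 32  # encodes (cp, 0), which sorts just before any (cp, end)
    pos = bisect.bisect_left(ranges, probe)
    if pos > 0:
        # cp may fall inside the range that starts before it
        start, end = decode_range(ranges[pos - 1])
        if start <= cp < end:
            return True
    if pos < len(ranges):
        # or cp may be exactly the start of the range at pos
        start, _ = decode_range(ranges[pos])
        if start == cp:
            return True
    return False

assert decode_range(0x610000007B) == (0x61, 0x7B)
assert contains(ord("a"), PVALID_SAMPLE) and not contains(ord("A"), PVALID_SAMPLE)

Because the packed integers sort by start first, a plain bisect over the flat tuple finds the candidate range in O(log #ranges) without unpacking anything up front, which is exactly the trade-off the module docstring above describes.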
- """ - - sorted_list = sorted(list_) - ranges = [] - last_write = -1 - for i in range(len(sorted_list)): - if i + 1 < len(sorted_list): - if sorted_list[i] == sorted_list[i + 1] - 1: - continue - current_range = sorted_list[last_write + 1 : i + 1] - ranges.append(_encode_range(current_range[0], current_range[-1] + 1)) - last_write = i - - return tuple(ranges) - - -def _encode_range(start: int, end: int) -> int: - return (start << 32) | end - - -def _decode_range(r: int) -> Tuple[int, int]: - return (r >> 32), (r & ((1 << 32) - 1)) - - -def intranges_contain(int_: int, ranges: Tuple[int, ...]) -> bool: - """Determine if `int_` falls into one of the ranges in `ranges`.""" - tuple_ = _encode_range(int_, 0) - pos = bisect.bisect_left(ranges, tuple_) - # we could be immediately ahead of a tuple (start, end) - # with start < int_ <= end - if pos > 0: - left, right = _decode_range(ranges[pos - 1]) - if left <= int_ < right: - return True - # or we could be immediately behind a tuple (int_, end) - if pos < len(ranges): - left, _ = _decode_range(ranges[pos]) - if left == int_: - return True - return False diff --git a/myenv/Lib/site-packages/idna/package_data.py b/myenv/Lib/site-packages/idna/package_data.py deleted file mode 100644 index 514ff7e..0000000 --- a/myenv/Lib/site-packages/idna/package_data.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "3.10" diff --git a/myenv/Lib/site-packages/idna/py.typed b/myenv/Lib/site-packages/idna/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/idna/uts46data.py b/myenv/Lib/site-packages/idna/uts46data.py deleted file mode 100644 index eb89432..0000000 --- a/myenv/Lib/site-packages/idna/uts46data.py +++ /dev/null @@ -1,8681 +0,0 @@ -# This file is automatically generated by tools/idna-data -# vim: set fileencoding=utf-8 : - -from typing import List, Tuple, Union - -"""IDNA Mapping Table from UTS46.""" - - -__version__ = "15.1.0" - - -def _seg_0() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x0, "3"), - (0x1, "3"), - (0x2, "3"), - (0x3, "3"), - (0x4, "3"), - (0x5, "3"), - (0x6, "3"), - (0x7, "3"), - (0x8, "3"), - (0x9, "3"), - (0xA, "3"), - (0xB, "3"), - (0xC, "3"), - (0xD, "3"), - (0xE, "3"), - (0xF, "3"), - (0x10, "3"), - (0x11, "3"), - (0x12, "3"), - (0x13, "3"), - (0x14, "3"), - (0x15, "3"), - (0x16, "3"), - (0x17, "3"), - (0x18, "3"), - (0x19, "3"), - (0x1A, "3"), - (0x1B, "3"), - (0x1C, "3"), - (0x1D, "3"), - (0x1E, "3"), - (0x1F, "3"), - (0x20, "3"), - (0x21, "3"), - (0x22, "3"), - (0x23, "3"), - (0x24, "3"), - (0x25, "3"), - (0x26, "3"), - (0x27, "3"), - (0x28, "3"), - (0x29, "3"), - (0x2A, "3"), - (0x2B, "3"), - (0x2C, "3"), - (0x2D, "V"), - (0x2E, "V"), - (0x2F, "3"), - (0x30, "V"), - (0x31, "V"), - (0x32, "V"), - (0x33, "V"), - (0x34, "V"), - (0x35, "V"), - (0x36, "V"), - (0x37, "V"), - (0x38, "V"), - (0x39, "V"), - (0x3A, "3"), - (0x3B, "3"), - (0x3C, "3"), - (0x3D, "3"), - (0x3E, "3"), - (0x3F, "3"), - (0x40, "3"), - (0x41, "M", "a"), - (0x42, "M", "b"), - (0x43, "M", "c"), - (0x44, "M", "d"), - (0x45, "M", "e"), - (0x46, "M", "f"), - (0x47, "M", "g"), - (0x48, "M", "h"), - (0x49, "M", "i"), - (0x4A, "M", "j"), - (0x4B, "M", "k"), - (0x4C, "M", "l"), - (0x4D, "M", "m"), - (0x4E, "M", "n"), - (0x4F, "M", "o"), - (0x50, "M", "p"), - (0x51, "M", "q"), - (0x52, "M", "r"), - (0x53, "M", "s"), - (0x54, "M", "t"), - (0x55, "M", "u"), - (0x56, "M", "v"), - (0x57, "M", "w"), - (0x58, "M", "x"), - (0x59, "M", "y"), - (0x5A, "M", "z"), - (0x5B, "3"), - (0x5C, "3"), - (0x5D, "3"), - (0x5E, 
"3"), - (0x5F, "3"), - (0x60, "3"), - (0x61, "V"), - (0x62, "V"), - (0x63, "V"), - ] - - -def _seg_1() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x64, "V"), - (0x65, "V"), - (0x66, "V"), - (0x67, "V"), - (0x68, "V"), - (0x69, "V"), - (0x6A, "V"), - (0x6B, "V"), - (0x6C, "V"), - (0x6D, "V"), - (0x6E, "V"), - (0x6F, "V"), - (0x70, "V"), - (0x71, "V"), - (0x72, "V"), - (0x73, "V"), - (0x74, "V"), - (0x75, "V"), - (0x76, "V"), - (0x77, "V"), - (0x78, "V"), - (0x79, "V"), - (0x7A, "V"), - (0x7B, "3"), - (0x7C, "3"), - (0x7D, "3"), - (0x7E, "3"), - (0x7F, "3"), - (0x80, "X"), - (0x81, "X"), - (0x82, "X"), - (0x83, "X"), - (0x84, "X"), - (0x85, "X"), - (0x86, "X"), - (0x87, "X"), - (0x88, "X"), - (0x89, "X"), - (0x8A, "X"), - (0x8B, "X"), - (0x8C, "X"), - (0x8D, "X"), - (0x8E, "X"), - (0x8F, "X"), - (0x90, "X"), - (0x91, "X"), - (0x92, "X"), - (0x93, "X"), - (0x94, "X"), - (0x95, "X"), - (0x96, "X"), - (0x97, "X"), - (0x98, "X"), - (0x99, "X"), - (0x9A, "X"), - (0x9B, "X"), - (0x9C, "X"), - (0x9D, "X"), - (0x9E, "X"), - (0x9F, "X"), - (0xA0, "3", " "), - (0xA1, "V"), - (0xA2, "V"), - (0xA3, "V"), - (0xA4, "V"), - (0xA5, "V"), - (0xA6, "V"), - (0xA7, "V"), - (0xA8, "3", " ̈"), - (0xA9, "V"), - (0xAA, "M", "a"), - (0xAB, "V"), - (0xAC, "V"), - (0xAD, "I"), - (0xAE, "V"), - (0xAF, "3", " ̄"), - (0xB0, "V"), - (0xB1, "V"), - (0xB2, "M", "2"), - (0xB3, "M", "3"), - (0xB4, "3", " ́"), - (0xB5, "M", "μ"), - (0xB6, "V"), - (0xB7, "V"), - (0xB8, "3", " ̧"), - (0xB9, "M", "1"), - (0xBA, "M", "o"), - (0xBB, "V"), - (0xBC, "M", "1⁄4"), - (0xBD, "M", "1⁄2"), - (0xBE, "M", "3⁄4"), - (0xBF, "V"), - (0xC0, "M", "à"), - (0xC1, "M", "á"), - (0xC2, "M", "â"), - (0xC3, "M", "ã"), - (0xC4, "M", "ä"), - (0xC5, "M", "å"), - (0xC6, "M", "æ"), - (0xC7, "M", "ç"), - ] - - -def _seg_2() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC8, "M", "è"), - (0xC9, "M", "é"), - (0xCA, "M", "ê"), - (0xCB, "M", "ë"), - (0xCC, "M", "ì"), - (0xCD, "M", "í"), - (0xCE, "M", "î"), - (0xCF, "M", "ï"), - (0xD0, "M", "ð"), - (0xD1, "M", "ñ"), - (0xD2, "M", "ò"), - (0xD3, "M", "ó"), - (0xD4, "M", "ô"), - (0xD5, "M", "õ"), - (0xD6, "M", "ö"), - (0xD7, "V"), - (0xD8, "M", "ø"), - (0xD9, "M", "ù"), - (0xDA, "M", "ú"), - (0xDB, "M", "û"), - (0xDC, "M", "ü"), - (0xDD, "M", "ý"), - (0xDE, "M", "þ"), - (0xDF, "D", "ss"), - (0xE0, "V"), - (0xE1, "V"), - (0xE2, "V"), - (0xE3, "V"), - (0xE4, "V"), - (0xE5, "V"), - (0xE6, "V"), - (0xE7, "V"), - (0xE8, "V"), - (0xE9, "V"), - (0xEA, "V"), - (0xEB, "V"), - (0xEC, "V"), - (0xED, "V"), - (0xEE, "V"), - (0xEF, "V"), - (0xF0, "V"), - (0xF1, "V"), - (0xF2, "V"), - (0xF3, "V"), - (0xF4, "V"), - (0xF5, "V"), - (0xF6, "V"), - (0xF7, "V"), - (0xF8, "V"), - (0xF9, "V"), - (0xFA, "V"), - (0xFB, "V"), - (0xFC, "V"), - (0xFD, "V"), - (0xFE, "V"), - (0xFF, "V"), - (0x100, "M", "ā"), - (0x101, "V"), - (0x102, "M", "ă"), - (0x103, "V"), - (0x104, "M", "ą"), - (0x105, "V"), - (0x106, "M", "ć"), - (0x107, "V"), - (0x108, "M", "ĉ"), - (0x109, "V"), - (0x10A, "M", "ċ"), - (0x10B, "V"), - (0x10C, "M", "č"), - (0x10D, "V"), - (0x10E, "M", "ď"), - (0x10F, "V"), - (0x110, "M", "đ"), - (0x111, "V"), - (0x112, "M", "ē"), - (0x113, "V"), - (0x114, "M", "ĕ"), - (0x115, "V"), - (0x116, "M", "ė"), - (0x117, "V"), - (0x118, "M", "ę"), - (0x119, "V"), - (0x11A, "M", "ě"), - (0x11B, "V"), - (0x11C, "M", "ĝ"), - (0x11D, "V"), - (0x11E, "M", "ğ"), - (0x11F, "V"), - (0x120, "M", "ġ"), - (0x121, "V"), - (0x122, "M", "ģ"), - (0x123, "V"), - (0x124, "M", "ĥ"), - (0x125, "V"), - (0x126, "M", 
"ħ"), - (0x127, "V"), - (0x128, "M", "ĩ"), - (0x129, "V"), - (0x12A, "M", "ī"), - (0x12B, "V"), - ] - - -def _seg_3() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x12C, "M", "ĭ"), - (0x12D, "V"), - (0x12E, "M", "į"), - (0x12F, "V"), - (0x130, "M", "i̇"), - (0x131, "V"), - (0x132, "M", "ij"), - (0x134, "M", "ĵ"), - (0x135, "V"), - (0x136, "M", "ķ"), - (0x137, "V"), - (0x139, "M", "ĺ"), - (0x13A, "V"), - (0x13B, "M", "ļ"), - (0x13C, "V"), - (0x13D, "M", "ľ"), - (0x13E, "V"), - (0x13F, "M", "l·"), - (0x141, "M", "ł"), - (0x142, "V"), - (0x143, "M", "ń"), - (0x144, "V"), - (0x145, "M", "ņ"), - (0x146, "V"), - (0x147, "M", "ň"), - (0x148, "V"), - (0x149, "M", "ʼn"), - (0x14A, "M", "ŋ"), - (0x14B, "V"), - (0x14C, "M", "ō"), - (0x14D, "V"), - (0x14E, "M", "ŏ"), - (0x14F, "V"), - (0x150, "M", "ő"), - (0x151, "V"), - (0x152, "M", "œ"), - (0x153, "V"), - (0x154, "M", "ŕ"), - (0x155, "V"), - (0x156, "M", "ŗ"), - (0x157, "V"), - (0x158, "M", "ř"), - (0x159, "V"), - (0x15A, "M", "ś"), - (0x15B, "V"), - (0x15C, "M", "ŝ"), - (0x15D, "V"), - (0x15E, "M", "ş"), - (0x15F, "V"), - (0x160, "M", "š"), - (0x161, "V"), - (0x162, "M", "ţ"), - (0x163, "V"), - (0x164, "M", "ť"), - (0x165, "V"), - (0x166, "M", "ŧ"), - (0x167, "V"), - (0x168, "M", "ũ"), - (0x169, "V"), - (0x16A, "M", "ū"), - (0x16B, "V"), - (0x16C, "M", "ŭ"), - (0x16D, "V"), - (0x16E, "M", "ů"), - (0x16F, "V"), - (0x170, "M", "ű"), - (0x171, "V"), - (0x172, "M", "ų"), - (0x173, "V"), - (0x174, "M", "ŵ"), - (0x175, "V"), - (0x176, "M", "ŷ"), - (0x177, "V"), - (0x178, "M", "ÿ"), - (0x179, "M", "ź"), - (0x17A, "V"), - (0x17B, "M", "ż"), - (0x17C, "V"), - (0x17D, "M", "ž"), - (0x17E, "V"), - (0x17F, "M", "s"), - (0x180, "V"), - (0x181, "M", "ɓ"), - (0x182, "M", "ƃ"), - (0x183, "V"), - (0x184, "M", "ƅ"), - (0x185, "V"), - (0x186, "M", "ɔ"), - (0x187, "M", "ƈ"), - (0x188, "V"), - (0x189, "M", "ɖ"), - (0x18A, "M", "ɗ"), - (0x18B, "M", "ƌ"), - (0x18C, "V"), - (0x18E, "M", "ǝ"), - (0x18F, "M", "ə"), - (0x190, "M", "ɛ"), - (0x191, "M", "ƒ"), - (0x192, "V"), - (0x193, "M", "ɠ"), - ] - - -def _seg_4() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x194, "M", "ɣ"), - (0x195, "V"), - (0x196, "M", "ɩ"), - (0x197, "M", "ɨ"), - (0x198, "M", "ƙ"), - (0x199, "V"), - (0x19C, "M", "ɯ"), - (0x19D, "M", "ɲ"), - (0x19E, "V"), - (0x19F, "M", "ɵ"), - (0x1A0, "M", "ơ"), - (0x1A1, "V"), - (0x1A2, "M", "ƣ"), - (0x1A3, "V"), - (0x1A4, "M", "ƥ"), - (0x1A5, "V"), - (0x1A6, "M", "ʀ"), - (0x1A7, "M", "ƨ"), - (0x1A8, "V"), - (0x1A9, "M", "ʃ"), - (0x1AA, "V"), - (0x1AC, "M", "ƭ"), - (0x1AD, "V"), - (0x1AE, "M", "ʈ"), - (0x1AF, "M", "ư"), - (0x1B0, "V"), - (0x1B1, "M", "ʊ"), - (0x1B2, "M", "ʋ"), - (0x1B3, "M", "ƴ"), - (0x1B4, "V"), - (0x1B5, "M", "ƶ"), - (0x1B6, "V"), - (0x1B7, "M", "ʒ"), - (0x1B8, "M", "ƹ"), - (0x1B9, "V"), - (0x1BC, "M", "ƽ"), - (0x1BD, "V"), - (0x1C4, "M", "dž"), - (0x1C7, "M", "lj"), - (0x1CA, "M", "nj"), - (0x1CD, "M", "ǎ"), - (0x1CE, "V"), - (0x1CF, "M", "ǐ"), - (0x1D0, "V"), - (0x1D1, "M", "ǒ"), - (0x1D2, "V"), - (0x1D3, "M", "ǔ"), - (0x1D4, "V"), - (0x1D5, "M", "ǖ"), - (0x1D6, "V"), - (0x1D7, "M", "ǘ"), - (0x1D8, "V"), - (0x1D9, "M", "ǚ"), - (0x1DA, "V"), - (0x1DB, "M", "ǜ"), - (0x1DC, "V"), - (0x1DE, "M", "ǟ"), - (0x1DF, "V"), - (0x1E0, "M", "ǡ"), - (0x1E1, "V"), - (0x1E2, "M", "ǣ"), - (0x1E3, "V"), - (0x1E4, "M", "ǥ"), - (0x1E5, "V"), - (0x1E6, "M", "ǧ"), - (0x1E7, "V"), - (0x1E8, "M", "ǩ"), - (0x1E9, "V"), - (0x1EA, "M", "ǫ"), - (0x1EB, "V"), - (0x1EC, "M", "ǭ"), - (0x1ED, "V"), - (0x1EE, "M", "ǯ"), - (0x1EF, 
"V"), - (0x1F1, "M", "dz"), - (0x1F4, "M", "ǵ"), - (0x1F5, "V"), - (0x1F6, "M", "ƕ"), - (0x1F7, "M", "ƿ"), - (0x1F8, "M", "ǹ"), - (0x1F9, "V"), - (0x1FA, "M", "ǻ"), - (0x1FB, "V"), - (0x1FC, "M", "ǽ"), - (0x1FD, "V"), - (0x1FE, "M", "ǿ"), - (0x1FF, "V"), - (0x200, "M", "ȁ"), - (0x201, "V"), - (0x202, "M", "ȃ"), - (0x203, "V"), - (0x204, "M", "ȅ"), - (0x205, "V"), - (0x206, "M", "ȇ"), - (0x207, "V"), - (0x208, "M", "ȉ"), - (0x209, "V"), - (0x20A, "M", "ȋ"), - (0x20B, "V"), - (0x20C, "M", "ȍ"), - ] - - -def _seg_5() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x20D, "V"), - (0x20E, "M", "ȏ"), - (0x20F, "V"), - (0x210, "M", "ȑ"), - (0x211, "V"), - (0x212, "M", "ȓ"), - (0x213, "V"), - (0x214, "M", "ȕ"), - (0x215, "V"), - (0x216, "M", "ȗ"), - (0x217, "V"), - (0x218, "M", "ș"), - (0x219, "V"), - (0x21A, "M", "ț"), - (0x21B, "V"), - (0x21C, "M", "ȝ"), - (0x21D, "V"), - (0x21E, "M", "ȟ"), - (0x21F, "V"), - (0x220, "M", "ƞ"), - (0x221, "V"), - (0x222, "M", "ȣ"), - (0x223, "V"), - (0x224, "M", "ȥ"), - (0x225, "V"), - (0x226, "M", "ȧ"), - (0x227, "V"), - (0x228, "M", "ȩ"), - (0x229, "V"), - (0x22A, "M", "ȫ"), - (0x22B, "V"), - (0x22C, "M", "ȭ"), - (0x22D, "V"), - (0x22E, "M", "ȯ"), - (0x22F, "V"), - (0x230, "M", "ȱ"), - (0x231, "V"), - (0x232, "M", "ȳ"), - (0x233, "V"), - (0x23A, "M", "ⱥ"), - (0x23B, "M", "ȼ"), - (0x23C, "V"), - (0x23D, "M", "ƚ"), - (0x23E, "M", "ⱦ"), - (0x23F, "V"), - (0x241, "M", "ɂ"), - (0x242, "V"), - (0x243, "M", "ƀ"), - (0x244, "M", "ʉ"), - (0x245, "M", "ʌ"), - (0x246, "M", "ɇ"), - (0x247, "V"), - (0x248, "M", "ɉ"), - (0x249, "V"), - (0x24A, "M", "ɋ"), - (0x24B, "V"), - (0x24C, "M", "ɍ"), - (0x24D, "V"), - (0x24E, "M", "ɏ"), - (0x24F, "V"), - (0x2B0, "M", "h"), - (0x2B1, "M", "ɦ"), - (0x2B2, "M", "j"), - (0x2B3, "M", "r"), - (0x2B4, "M", "ɹ"), - (0x2B5, "M", "ɻ"), - (0x2B6, "M", "ʁ"), - (0x2B7, "M", "w"), - (0x2B8, "M", "y"), - (0x2B9, "V"), - (0x2D8, "3", " ̆"), - (0x2D9, "3", " ̇"), - (0x2DA, "3", " ̊"), - (0x2DB, "3", " ̨"), - (0x2DC, "3", " ̃"), - (0x2DD, "3", " ̋"), - (0x2DE, "V"), - (0x2E0, "M", "ɣ"), - (0x2E1, "M", "l"), - (0x2E2, "M", "s"), - (0x2E3, "M", "x"), - (0x2E4, "M", "ʕ"), - (0x2E5, "V"), - (0x340, "M", "̀"), - (0x341, "M", "́"), - (0x342, "V"), - (0x343, "M", "̓"), - (0x344, "M", "̈́"), - (0x345, "M", "ι"), - (0x346, "V"), - (0x34F, "I"), - (0x350, "V"), - (0x370, "M", "ͱ"), - (0x371, "V"), - (0x372, "M", "ͳ"), - (0x373, "V"), - (0x374, "M", "ʹ"), - (0x375, "V"), - (0x376, "M", "ͷ"), - (0x377, "V"), - ] - - -def _seg_6() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x378, "X"), - (0x37A, "3", " ι"), - (0x37B, "V"), - (0x37E, "3", ";"), - (0x37F, "M", "ϳ"), - (0x380, "X"), - (0x384, "3", " ́"), - (0x385, "3", " ̈́"), - (0x386, "M", "ά"), - (0x387, "M", "·"), - (0x388, "M", "έ"), - (0x389, "M", "ή"), - (0x38A, "M", "ί"), - (0x38B, "X"), - (0x38C, "M", "ό"), - (0x38D, "X"), - (0x38E, "M", "ύ"), - (0x38F, "M", "ώ"), - (0x390, "V"), - (0x391, "M", "α"), - (0x392, "M", "β"), - (0x393, "M", "γ"), - (0x394, "M", "δ"), - (0x395, "M", "ε"), - (0x396, "M", "ζ"), - (0x397, "M", "η"), - (0x398, "M", "θ"), - (0x399, "M", "ι"), - (0x39A, "M", "κ"), - (0x39B, "M", "λ"), - (0x39C, "M", "μ"), - (0x39D, "M", "ν"), - (0x39E, "M", "ξ"), - (0x39F, "M", "ο"), - (0x3A0, "M", "π"), - (0x3A1, "M", "ρ"), - (0x3A2, "X"), - (0x3A3, "M", "σ"), - (0x3A4, "M", "τ"), - (0x3A5, "M", "υ"), - (0x3A6, "M", "φ"), - (0x3A7, "M", "χ"), - (0x3A8, "M", "ψ"), - (0x3A9, "M", "ω"), - (0x3AA, "M", "ϊ"), - (0x3AB, "M", "ϋ"), - (0x3AC, "V"), - (0x3C2, "D", 
"σ"), - (0x3C3, "V"), - (0x3CF, "M", "ϗ"), - (0x3D0, "M", "β"), - (0x3D1, "M", "θ"), - (0x3D2, "M", "υ"), - (0x3D3, "M", "ύ"), - (0x3D4, "M", "ϋ"), - (0x3D5, "M", "φ"), - (0x3D6, "M", "π"), - (0x3D7, "V"), - (0x3D8, "M", "ϙ"), - (0x3D9, "V"), - (0x3DA, "M", "ϛ"), - (0x3DB, "V"), - (0x3DC, "M", "ϝ"), - (0x3DD, "V"), - (0x3DE, "M", "ϟ"), - (0x3DF, "V"), - (0x3E0, "M", "ϡ"), - (0x3E1, "V"), - (0x3E2, "M", "ϣ"), - (0x3E3, "V"), - (0x3E4, "M", "ϥ"), - (0x3E5, "V"), - (0x3E6, "M", "ϧ"), - (0x3E7, "V"), - (0x3E8, "M", "ϩ"), - (0x3E9, "V"), - (0x3EA, "M", "ϫ"), - (0x3EB, "V"), - (0x3EC, "M", "ϭ"), - (0x3ED, "V"), - (0x3EE, "M", "ϯ"), - (0x3EF, "V"), - (0x3F0, "M", "κ"), - (0x3F1, "M", "ρ"), - (0x3F2, "M", "σ"), - (0x3F3, "V"), - (0x3F4, "M", "θ"), - (0x3F5, "M", "ε"), - (0x3F6, "V"), - (0x3F7, "M", "ϸ"), - (0x3F8, "V"), - (0x3F9, "M", "σ"), - (0x3FA, "M", "ϻ"), - (0x3FB, "V"), - (0x3FD, "M", "ͻ"), - (0x3FE, "M", "ͼ"), - (0x3FF, "M", "ͽ"), - (0x400, "M", "ѐ"), - (0x401, "M", "ё"), - (0x402, "M", "ђ"), - ] - - -def _seg_7() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x403, "M", "ѓ"), - (0x404, "M", "є"), - (0x405, "M", "ѕ"), - (0x406, "M", "і"), - (0x407, "M", "ї"), - (0x408, "M", "ј"), - (0x409, "M", "љ"), - (0x40A, "M", "њ"), - (0x40B, "M", "ћ"), - (0x40C, "M", "ќ"), - (0x40D, "M", "ѝ"), - (0x40E, "M", "ў"), - (0x40F, "M", "џ"), - (0x410, "M", "а"), - (0x411, "M", "б"), - (0x412, "M", "в"), - (0x413, "M", "г"), - (0x414, "M", "д"), - (0x415, "M", "е"), - (0x416, "M", "ж"), - (0x417, "M", "з"), - (0x418, "M", "и"), - (0x419, "M", "й"), - (0x41A, "M", "к"), - (0x41B, "M", "л"), - (0x41C, "M", "м"), - (0x41D, "M", "н"), - (0x41E, "M", "о"), - (0x41F, "M", "п"), - (0x420, "M", "р"), - (0x421, "M", "с"), - (0x422, "M", "т"), - (0x423, "M", "у"), - (0x424, "M", "ф"), - (0x425, "M", "х"), - (0x426, "M", "ц"), - (0x427, "M", "ч"), - (0x428, "M", "ш"), - (0x429, "M", "щ"), - (0x42A, "M", "ъ"), - (0x42B, "M", "ы"), - (0x42C, "M", "ь"), - (0x42D, "M", "э"), - (0x42E, "M", "ю"), - (0x42F, "M", "я"), - (0x430, "V"), - (0x460, "M", "ѡ"), - (0x461, "V"), - (0x462, "M", "ѣ"), - (0x463, "V"), - (0x464, "M", "ѥ"), - (0x465, "V"), - (0x466, "M", "ѧ"), - (0x467, "V"), - (0x468, "M", "ѩ"), - (0x469, "V"), - (0x46A, "M", "ѫ"), - (0x46B, "V"), - (0x46C, "M", "ѭ"), - (0x46D, "V"), - (0x46E, "M", "ѯ"), - (0x46F, "V"), - (0x470, "M", "ѱ"), - (0x471, "V"), - (0x472, "M", "ѳ"), - (0x473, "V"), - (0x474, "M", "ѵ"), - (0x475, "V"), - (0x476, "M", "ѷ"), - (0x477, "V"), - (0x478, "M", "ѹ"), - (0x479, "V"), - (0x47A, "M", "ѻ"), - (0x47B, "V"), - (0x47C, "M", "ѽ"), - (0x47D, "V"), - (0x47E, "M", "ѿ"), - (0x47F, "V"), - (0x480, "M", "ҁ"), - (0x481, "V"), - (0x48A, "M", "ҋ"), - (0x48B, "V"), - (0x48C, "M", "ҍ"), - (0x48D, "V"), - (0x48E, "M", "ҏ"), - (0x48F, "V"), - (0x490, "M", "ґ"), - (0x491, "V"), - (0x492, "M", "ғ"), - (0x493, "V"), - (0x494, "M", "ҕ"), - (0x495, "V"), - (0x496, "M", "җ"), - (0x497, "V"), - (0x498, "M", "ҙ"), - (0x499, "V"), - (0x49A, "M", "қ"), - (0x49B, "V"), - (0x49C, "M", "ҝ"), - (0x49D, "V"), - ] - - -def _seg_8() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x49E, "M", "ҟ"), - (0x49F, "V"), - (0x4A0, "M", "ҡ"), - (0x4A1, "V"), - (0x4A2, "M", "ң"), - (0x4A3, "V"), - (0x4A4, "M", "ҥ"), - (0x4A5, "V"), - (0x4A6, "M", "ҧ"), - (0x4A7, "V"), - (0x4A8, "M", "ҩ"), - (0x4A9, "V"), - (0x4AA, "M", "ҫ"), - (0x4AB, "V"), - (0x4AC, "M", "ҭ"), - (0x4AD, "V"), - (0x4AE, "M", "ү"), - (0x4AF, "V"), - (0x4B0, "M", "ұ"), - (0x4B1, "V"), - (0x4B2, "M", "ҳ"), - (0x4B3, "V"), - 
(0x4B4, "M", "ҵ"), - (0x4B5, "V"), - (0x4B6, "M", "ҷ"), - (0x4B7, "V"), - (0x4B8, "M", "ҹ"), - (0x4B9, "V"), - (0x4BA, "M", "һ"), - (0x4BB, "V"), - (0x4BC, "M", "ҽ"), - (0x4BD, "V"), - (0x4BE, "M", "ҿ"), - (0x4BF, "V"), - (0x4C0, "X"), - (0x4C1, "M", "ӂ"), - (0x4C2, "V"), - (0x4C3, "M", "ӄ"), - (0x4C4, "V"), - (0x4C5, "M", "ӆ"), - (0x4C6, "V"), - (0x4C7, "M", "ӈ"), - (0x4C8, "V"), - (0x4C9, "M", "ӊ"), - (0x4CA, "V"), - (0x4CB, "M", "ӌ"), - (0x4CC, "V"), - (0x4CD, "M", "ӎ"), - (0x4CE, "V"), - (0x4D0, "M", "ӑ"), - (0x4D1, "V"), - (0x4D2, "M", "ӓ"), - (0x4D3, "V"), - (0x4D4, "M", "ӕ"), - (0x4D5, "V"), - (0x4D6, "M", "ӗ"), - (0x4D7, "V"), - (0x4D8, "M", "ә"), - (0x4D9, "V"), - (0x4DA, "M", "ӛ"), - (0x4DB, "V"), - (0x4DC, "M", "ӝ"), - (0x4DD, "V"), - (0x4DE, "M", "ӟ"), - (0x4DF, "V"), - (0x4E0, "M", "ӡ"), - (0x4E1, "V"), - (0x4E2, "M", "ӣ"), - (0x4E3, "V"), - (0x4E4, "M", "ӥ"), - (0x4E5, "V"), - (0x4E6, "M", "ӧ"), - (0x4E7, "V"), - (0x4E8, "M", "ө"), - (0x4E9, "V"), - (0x4EA, "M", "ӫ"), - (0x4EB, "V"), - (0x4EC, "M", "ӭ"), - (0x4ED, "V"), - (0x4EE, "M", "ӯ"), - (0x4EF, "V"), - (0x4F0, "M", "ӱ"), - (0x4F1, "V"), - (0x4F2, "M", "ӳ"), - (0x4F3, "V"), - (0x4F4, "M", "ӵ"), - (0x4F5, "V"), - (0x4F6, "M", "ӷ"), - (0x4F7, "V"), - (0x4F8, "M", "ӹ"), - (0x4F9, "V"), - (0x4FA, "M", "ӻ"), - (0x4FB, "V"), - (0x4FC, "M", "ӽ"), - (0x4FD, "V"), - (0x4FE, "M", "ӿ"), - (0x4FF, "V"), - (0x500, "M", "ԁ"), - (0x501, "V"), - (0x502, "M", "ԃ"), - ] - - -def _seg_9() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x503, "V"), - (0x504, "M", "ԅ"), - (0x505, "V"), - (0x506, "M", "ԇ"), - (0x507, "V"), - (0x508, "M", "ԉ"), - (0x509, "V"), - (0x50A, "M", "ԋ"), - (0x50B, "V"), - (0x50C, "M", "ԍ"), - (0x50D, "V"), - (0x50E, "M", "ԏ"), - (0x50F, "V"), - (0x510, "M", "ԑ"), - (0x511, "V"), - (0x512, "M", "ԓ"), - (0x513, "V"), - (0x514, "M", "ԕ"), - (0x515, "V"), - (0x516, "M", "ԗ"), - (0x517, "V"), - (0x518, "M", "ԙ"), - (0x519, "V"), - (0x51A, "M", "ԛ"), - (0x51B, "V"), - (0x51C, "M", "ԝ"), - (0x51D, "V"), - (0x51E, "M", "ԟ"), - (0x51F, "V"), - (0x520, "M", "ԡ"), - (0x521, "V"), - (0x522, "M", "ԣ"), - (0x523, "V"), - (0x524, "M", "ԥ"), - (0x525, "V"), - (0x526, "M", "ԧ"), - (0x527, "V"), - (0x528, "M", "ԩ"), - (0x529, "V"), - (0x52A, "M", "ԫ"), - (0x52B, "V"), - (0x52C, "M", "ԭ"), - (0x52D, "V"), - (0x52E, "M", "ԯ"), - (0x52F, "V"), - (0x530, "X"), - (0x531, "M", "ա"), - (0x532, "M", "բ"), - (0x533, "M", "գ"), - (0x534, "M", "դ"), - (0x535, "M", "ե"), - (0x536, "M", "զ"), - (0x537, "M", "է"), - (0x538, "M", "ը"), - (0x539, "M", "թ"), - (0x53A, "M", "ժ"), - (0x53B, "M", "ի"), - (0x53C, "M", "լ"), - (0x53D, "M", "խ"), - (0x53E, "M", "ծ"), - (0x53F, "M", "կ"), - (0x540, "M", "հ"), - (0x541, "M", "ձ"), - (0x542, "M", "ղ"), - (0x543, "M", "ճ"), - (0x544, "M", "մ"), - (0x545, "M", "յ"), - (0x546, "M", "ն"), - (0x547, "M", "շ"), - (0x548, "M", "ո"), - (0x549, "M", "չ"), - (0x54A, "M", "պ"), - (0x54B, "M", "ջ"), - (0x54C, "M", "ռ"), - (0x54D, "M", "ս"), - (0x54E, "M", "վ"), - (0x54F, "M", "տ"), - (0x550, "M", "ր"), - (0x551, "M", "ց"), - (0x552, "M", "ւ"), - (0x553, "M", "փ"), - (0x554, "M", "ք"), - (0x555, "M", "օ"), - (0x556, "M", "ֆ"), - (0x557, "X"), - (0x559, "V"), - (0x587, "M", "եւ"), - (0x588, "V"), - (0x58B, "X"), - (0x58D, "V"), - (0x590, "X"), - (0x591, "V"), - (0x5C8, "X"), - (0x5D0, "V"), - (0x5EB, "X"), - (0x5EF, "V"), - (0x5F5, "X"), - (0x606, "V"), - (0x61C, "X"), - (0x61D, "V"), - ] - - -def _seg_10() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x675, "M", "اٴ"), - 
(0x676, "M", "وٴ"), - (0x677, "M", "ۇٴ"), - (0x678, "M", "يٴ"), - (0x679, "V"), - (0x6DD, "X"), - (0x6DE, "V"), - (0x70E, "X"), - (0x710, "V"), - (0x74B, "X"), - (0x74D, "V"), - (0x7B2, "X"), - (0x7C0, "V"), - (0x7FB, "X"), - (0x7FD, "V"), - (0x82E, "X"), - (0x830, "V"), - (0x83F, "X"), - (0x840, "V"), - (0x85C, "X"), - (0x85E, "V"), - (0x85F, "X"), - (0x860, "V"), - (0x86B, "X"), - (0x870, "V"), - (0x88F, "X"), - (0x898, "V"), - (0x8E2, "X"), - (0x8E3, "V"), - (0x958, "M", "क़"), - (0x959, "M", "ख़"), - (0x95A, "M", "ग़"), - (0x95B, "M", "ज़"), - (0x95C, "M", "ड़"), - (0x95D, "M", "ढ़"), - (0x95E, "M", "फ़"), - (0x95F, "M", "य़"), - (0x960, "V"), - (0x984, "X"), - (0x985, "V"), - (0x98D, "X"), - (0x98F, "V"), - (0x991, "X"), - (0x993, "V"), - (0x9A9, "X"), - (0x9AA, "V"), - (0x9B1, "X"), - (0x9B2, "V"), - (0x9B3, "X"), - (0x9B6, "V"), - (0x9BA, "X"), - (0x9BC, "V"), - (0x9C5, "X"), - (0x9C7, "V"), - (0x9C9, "X"), - (0x9CB, "V"), - (0x9CF, "X"), - (0x9D7, "V"), - (0x9D8, "X"), - (0x9DC, "M", "ড়"), - (0x9DD, "M", "ঢ়"), - (0x9DE, "X"), - (0x9DF, "M", "য়"), - (0x9E0, "V"), - (0x9E4, "X"), - (0x9E6, "V"), - (0x9FF, "X"), - (0xA01, "V"), - (0xA04, "X"), - (0xA05, "V"), - (0xA0B, "X"), - (0xA0F, "V"), - (0xA11, "X"), - (0xA13, "V"), - (0xA29, "X"), - (0xA2A, "V"), - (0xA31, "X"), - (0xA32, "V"), - (0xA33, "M", "ਲ਼"), - (0xA34, "X"), - (0xA35, "V"), - (0xA36, "M", "ਸ਼"), - (0xA37, "X"), - (0xA38, "V"), - (0xA3A, "X"), - (0xA3C, "V"), - (0xA3D, "X"), - (0xA3E, "V"), - (0xA43, "X"), - (0xA47, "V"), - (0xA49, "X"), - (0xA4B, "V"), - (0xA4E, "X"), - (0xA51, "V"), - (0xA52, "X"), - (0xA59, "M", "ਖ਼"), - (0xA5A, "M", "ਗ਼"), - (0xA5B, "M", "ਜ਼"), - (0xA5C, "V"), - (0xA5D, "X"), - ] - - -def _seg_11() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA5E, "M", "ਫ਼"), - (0xA5F, "X"), - (0xA66, "V"), - (0xA77, "X"), - (0xA81, "V"), - (0xA84, "X"), - (0xA85, "V"), - (0xA8E, "X"), - (0xA8F, "V"), - (0xA92, "X"), - (0xA93, "V"), - (0xAA9, "X"), - (0xAAA, "V"), - (0xAB1, "X"), - (0xAB2, "V"), - (0xAB4, "X"), - (0xAB5, "V"), - (0xABA, "X"), - (0xABC, "V"), - (0xAC6, "X"), - (0xAC7, "V"), - (0xACA, "X"), - (0xACB, "V"), - (0xACE, "X"), - (0xAD0, "V"), - (0xAD1, "X"), - (0xAE0, "V"), - (0xAE4, "X"), - (0xAE6, "V"), - (0xAF2, "X"), - (0xAF9, "V"), - (0xB00, "X"), - (0xB01, "V"), - (0xB04, "X"), - (0xB05, "V"), - (0xB0D, "X"), - (0xB0F, "V"), - (0xB11, "X"), - (0xB13, "V"), - (0xB29, "X"), - (0xB2A, "V"), - (0xB31, "X"), - (0xB32, "V"), - (0xB34, "X"), - (0xB35, "V"), - (0xB3A, "X"), - (0xB3C, "V"), - (0xB45, "X"), - (0xB47, "V"), - (0xB49, "X"), - (0xB4B, "V"), - (0xB4E, "X"), - (0xB55, "V"), - (0xB58, "X"), - (0xB5C, "M", "ଡ଼"), - (0xB5D, "M", "ଢ଼"), - (0xB5E, "X"), - (0xB5F, "V"), - (0xB64, "X"), - (0xB66, "V"), - (0xB78, "X"), - (0xB82, "V"), - (0xB84, "X"), - (0xB85, "V"), - (0xB8B, "X"), - (0xB8E, "V"), - (0xB91, "X"), - (0xB92, "V"), - (0xB96, "X"), - (0xB99, "V"), - (0xB9B, "X"), - (0xB9C, "V"), - (0xB9D, "X"), - (0xB9E, "V"), - (0xBA0, "X"), - (0xBA3, "V"), - (0xBA5, "X"), - (0xBA8, "V"), - (0xBAB, "X"), - (0xBAE, "V"), - (0xBBA, "X"), - (0xBBE, "V"), - (0xBC3, "X"), - (0xBC6, "V"), - (0xBC9, "X"), - (0xBCA, "V"), - (0xBCE, "X"), - (0xBD0, "V"), - (0xBD1, "X"), - (0xBD7, "V"), - (0xBD8, "X"), - (0xBE6, "V"), - (0xBFB, "X"), - (0xC00, "V"), - (0xC0D, "X"), - (0xC0E, "V"), - (0xC11, "X"), - (0xC12, "V"), - (0xC29, "X"), - (0xC2A, "V"), - ] - - -def _seg_12() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xC3A, "X"), - (0xC3C, "V"), - (0xC45, "X"), - (0xC46, 
"V"), - (0xC49, "X"), - (0xC4A, "V"), - (0xC4E, "X"), - (0xC55, "V"), - (0xC57, "X"), - (0xC58, "V"), - (0xC5B, "X"), - (0xC5D, "V"), - (0xC5E, "X"), - (0xC60, "V"), - (0xC64, "X"), - (0xC66, "V"), - (0xC70, "X"), - (0xC77, "V"), - (0xC8D, "X"), - (0xC8E, "V"), - (0xC91, "X"), - (0xC92, "V"), - (0xCA9, "X"), - (0xCAA, "V"), - (0xCB4, "X"), - (0xCB5, "V"), - (0xCBA, "X"), - (0xCBC, "V"), - (0xCC5, "X"), - (0xCC6, "V"), - (0xCC9, "X"), - (0xCCA, "V"), - (0xCCE, "X"), - (0xCD5, "V"), - (0xCD7, "X"), - (0xCDD, "V"), - (0xCDF, "X"), - (0xCE0, "V"), - (0xCE4, "X"), - (0xCE6, "V"), - (0xCF0, "X"), - (0xCF1, "V"), - (0xCF4, "X"), - (0xD00, "V"), - (0xD0D, "X"), - (0xD0E, "V"), - (0xD11, "X"), - (0xD12, "V"), - (0xD45, "X"), - (0xD46, "V"), - (0xD49, "X"), - (0xD4A, "V"), - (0xD50, "X"), - (0xD54, "V"), - (0xD64, "X"), - (0xD66, "V"), - (0xD80, "X"), - (0xD81, "V"), - (0xD84, "X"), - (0xD85, "V"), - (0xD97, "X"), - (0xD9A, "V"), - (0xDB2, "X"), - (0xDB3, "V"), - (0xDBC, "X"), - (0xDBD, "V"), - (0xDBE, "X"), - (0xDC0, "V"), - (0xDC7, "X"), - (0xDCA, "V"), - (0xDCB, "X"), - (0xDCF, "V"), - (0xDD5, "X"), - (0xDD6, "V"), - (0xDD7, "X"), - (0xDD8, "V"), - (0xDE0, "X"), - (0xDE6, "V"), - (0xDF0, "X"), - (0xDF2, "V"), - (0xDF5, "X"), - (0xE01, "V"), - (0xE33, "M", "ํา"), - (0xE34, "V"), - (0xE3B, "X"), - (0xE3F, "V"), - (0xE5C, "X"), - (0xE81, "V"), - (0xE83, "X"), - (0xE84, "V"), - (0xE85, "X"), - (0xE86, "V"), - (0xE8B, "X"), - (0xE8C, "V"), - (0xEA4, "X"), - (0xEA5, "V"), - (0xEA6, "X"), - (0xEA7, "V"), - (0xEB3, "M", "ໍາ"), - (0xEB4, "V"), - ] - - -def _seg_13() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xEBE, "X"), - (0xEC0, "V"), - (0xEC5, "X"), - (0xEC6, "V"), - (0xEC7, "X"), - (0xEC8, "V"), - (0xECF, "X"), - (0xED0, "V"), - (0xEDA, "X"), - (0xEDC, "M", "ຫນ"), - (0xEDD, "M", "ຫມ"), - (0xEDE, "V"), - (0xEE0, "X"), - (0xF00, "V"), - (0xF0C, "M", "་"), - (0xF0D, "V"), - (0xF43, "M", "གྷ"), - (0xF44, "V"), - (0xF48, "X"), - (0xF49, "V"), - (0xF4D, "M", "ཌྷ"), - (0xF4E, "V"), - (0xF52, "M", "དྷ"), - (0xF53, "V"), - (0xF57, "M", "བྷ"), - (0xF58, "V"), - (0xF5C, "M", "ཛྷ"), - (0xF5D, "V"), - (0xF69, "M", "ཀྵ"), - (0xF6A, "V"), - (0xF6D, "X"), - (0xF71, "V"), - (0xF73, "M", "ཱི"), - (0xF74, "V"), - (0xF75, "M", "ཱུ"), - (0xF76, "M", "ྲྀ"), - (0xF77, "M", "ྲཱྀ"), - (0xF78, "M", "ླྀ"), - (0xF79, "M", "ླཱྀ"), - (0xF7A, "V"), - (0xF81, "M", "ཱྀ"), - (0xF82, "V"), - (0xF93, "M", "ྒྷ"), - (0xF94, "V"), - (0xF98, "X"), - (0xF99, "V"), - (0xF9D, "M", "ྜྷ"), - (0xF9E, "V"), - (0xFA2, "M", "ྡྷ"), - (0xFA3, "V"), - (0xFA7, "M", "ྦྷ"), - (0xFA8, "V"), - (0xFAC, "M", "ྫྷ"), - (0xFAD, "V"), - (0xFB9, "M", "ྐྵ"), - (0xFBA, "V"), - (0xFBD, "X"), - (0xFBE, "V"), - (0xFCD, "X"), - (0xFCE, "V"), - (0xFDB, "X"), - (0x1000, "V"), - (0x10A0, "X"), - (0x10C7, "M", "ⴧ"), - (0x10C8, "X"), - (0x10CD, "M", "ⴭ"), - (0x10CE, "X"), - (0x10D0, "V"), - (0x10FC, "M", "ნ"), - (0x10FD, "V"), - (0x115F, "X"), - (0x1161, "V"), - (0x1249, "X"), - (0x124A, "V"), - (0x124E, "X"), - (0x1250, "V"), - (0x1257, "X"), - (0x1258, "V"), - (0x1259, "X"), - (0x125A, "V"), - (0x125E, "X"), - (0x1260, "V"), - (0x1289, "X"), - (0x128A, "V"), - (0x128E, "X"), - (0x1290, "V"), - (0x12B1, "X"), - (0x12B2, "V"), - (0x12B6, "X"), - (0x12B8, "V"), - (0x12BF, "X"), - (0x12C0, "V"), - (0x12C1, "X"), - (0x12C2, "V"), - (0x12C6, "X"), - (0x12C8, "V"), - (0x12D7, "X"), - (0x12D8, "V"), - (0x1311, "X"), - (0x1312, "V"), - ] - - -def _seg_14() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1316, "X"), - (0x1318, "V"), - 
(0x135B, "X"), - (0x135D, "V"), - (0x137D, "X"), - (0x1380, "V"), - (0x139A, "X"), - (0x13A0, "V"), - (0x13F6, "X"), - (0x13F8, "M", "Ᏸ"), - (0x13F9, "M", "Ᏹ"), - (0x13FA, "M", "Ᏺ"), - (0x13FB, "M", "Ᏻ"), - (0x13FC, "M", "Ᏼ"), - (0x13FD, "M", "Ᏽ"), - (0x13FE, "X"), - (0x1400, "V"), - (0x1680, "X"), - (0x1681, "V"), - (0x169D, "X"), - (0x16A0, "V"), - (0x16F9, "X"), - (0x1700, "V"), - (0x1716, "X"), - (0x171F, "V"), - (0x1737, "X"), - (0x1740, "V"), - (0x1754, "X"), - (0x1760, "V"), - (0x176D, "X"), - (0x176E, "V"), - (0x1771, "X"), - (0x1772, "V"), - (0x1774, "X"), - (0x1780, "V"), - (0x17B4, "X"), - (0x17B6, "V"), - (0x17DE, "X"), - (0x17E0, "V"), - (0x17EA, "X"), - (0x17F0, "V"), - (0x17FA, "X"), - (0x1800, "V"), - (0x1806, "X"), - (0x1807, "V"), - (0x180B, "I"), - (0x180E, "X"), - (0x180F, "I"), - (0x1810, "V"), - (0x181A, "X"), - (0x1820, "V"), - (0x1879, "X"), - (0x1880, "V"), - (0x18AB, "X"), - (0x18B0, "V"), - (0x18F6, "X"), - (0x1900, "V"), - (0x191F, "X"), - (0x1920, "V"), - (0x192C, "X"), - (0x1930, "V"), - (0x193C, "X"), - (0x1940, "V"), - (0x1941, "X"), - (0x1944, "V"), - (0x196E, "X"), - (0x1970, "V"), - (0x1975, "X"), - (0x1980, "V"), - (0x19AC, "X"), - (0x19B0, "V"), - (0x19CA, "X"), - (0x19D0, "V"), - (0x19DB, "X"), - (0x19DE, "V"), - (0x1A1C, "X"), - (0x1A1E, "V"), - (0x1A5F, "X"), - (0x1A60, "V"), - (0x1A7D, "X"), - (0x1A7F, "V"), - (0x1A8A, "X"), - (0x1A90, "V"), - (0x1A9A, "X"), - (0x1AA0, "V"), - (0x1AAE, "X"), - (0x1AB0, "V"), - (0x1ACF, "X"), - (0x1B00, "V"), - (0x1B4D, "X"), - (0x1B50, "V"), - (0x1B7F, "X"), - (0x1B80, "V"), - (0x1BF4, "X"), - (0x1BFC, "V"), - (0x1C38, "X"), - (0x1C3B, "V"), - (0x1C4A, "X"), - (0x1C4D, "V"), - (0x1C80, "M", "в"), - ] - - -def _seg_15() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1C81, "M", "д"), - (0x1C82, "M", "о"), - (0x1C83, "M", "с"), - (0x1C84, "M", "т"), - (0x1C86, "M", "ъ"), - (0x1C87, "M", "ѣ"), - (0x1C88, "M", "ꙋ"), - (0x1C89, "X"), - (0x1C90, "M", "ა"), - (0x1C91, "M", "ბ"), - (0x1C92, "M", "გ"), - (0x1C93, "M", "დ"), - (0x1C94, "M", "ე"), - (0x1C95, "M", "ვ"), - (0x1C96, "M", "ზ"), - (0x1C97, "M", "თ"), - (0x1C98, "M", "ი"), - (0x1C99, "M", "კ"), - (0x1C9A, "M", "ლ"), - (0x1C9B, "M", "მ"), - (0x1C9C, "M", "ნ"), - (0x1C9D, "M", "ო"), - (0x1C9E, "M", "პ"), - (0x1C9F, "M", "ჟ"), - (0x1CA0, "M", "რ"), - (0x1CA1, "M", "ს"), - (0x1CA2, "M", "ტ"), - (0x1CA3, "M", "უ"), - (0x1CA4, "M", "ფ"), - (0x1CA5, "M", "ქ"), - (0x1CA6, "M", "ღ"), - (0x1CA7, "M", "ყ"), - (0x1CA8, "M", "შ"), - (0x1CA9, "M", "ჩ"), - (0x1CAA, "M", "ც"), - (0x1CAB, "M", "ძ"), - (0x1CAC, "M", "წ"), - (0x1CAD, "M", "ჭ"), - (0x1CAE, "M", "ხ"), - (0x1CAF, "M", "ჯ"), - (0x1CB0, "M", "ჰ"), - (0x1CB1, "M", "ჱ"), - (0x1CB2, "M", "ჲ"), - (0x1CB3, "M", "ჳ"), - (0x1CB4, "M", "ჴ"), - (0x1CB5, "M", "ჵ"), - (0x1CB6, "M", "ჶ"), - (0x1CB7, "M", "ჷ"), - (0x1CB8, "M", "ჸ"), - (0x1CB9, "M", "ჹ"), - (0x1CBA, "M", "ჺ"), - (0x1CBB, "X"), - (0x1CBD, "M", "ჽ"), - (0x1CBE, "M", "ჾ"), - (0x1CBF, "M", "ჿ"), - (0x1CC0, "V"), - (0x1CC8, "X"), - (0x1CD0, "V"), - (0x1CFB, "X"), - (0x1D00, "V"), - (0x1D2C, "M", "a"), - (0x1D2D, "M", "æ"), - (0x1D2E, "M", "b"), - (0x1D2F, "V"), - (0x1D30, "M", "d"), - (0x1D31, "M", "e"), - (0x1D32, "M", "ǝ"), - (0x1D33, "M", "g"), - (0x1D34, "M", "h"), - (0x1D35, "M", "i"), - (0x1D36, "M", "j"), - (0x1D37, "M", "k"), - (0x1D38, "M", "l"), - (0x1D39, "M", "m"), - (0x1D3A, "M", "n"), - (0x1D3B, "V"), - (0x1D3C, "M", "o"), - (0x1D3D, "M", "ȣ"), - (0x1D3E, "M", "p"), - (0x1D3F, "M", "r"), - (0x1D40, "M", "t"), - (0x1D41, "M", "u"), - 
(0x1D42, "M", "w"), - (0x1D43, "M", "a"), - (0x1D44, "M", "ɐ"), - (0x1D45, "M", "ɑ"), - (0x1D46, "M", "ᴂ"), - (0x1D47, "M", "b"), - (0x1D48, "M", "d"), - (0x1D49, "M", "e"), - (0x1D4A, "M", "ə"), - (0x1D4B, "M", "ɛ"), - (0x1D4C, "M", "ɜ"), - (0x1D4D, "M", "g"), - (0x1D4E, "V"), - (0x1D4F, "M", "k"), - (0x1D50, "M", "m"), - (0x1D51, "M", "ŋ"), - (0x1D52, "M", "o"), - (0x1D53, "M", "ɔ"), - ] - - -def _seg_16() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D54, "M", "ᴖ"), - (0x1D55, "M", "ᴗ"), - (0x1D56, "M", "p"), - (0x1D57, "M", "t"), - (0x1D58, "M", "u"), - (0x1D59, "M", "ᴝ"), - (0x1D5A, "M", "ɯ"), - (0x1D5B, "M", "v"), - (0x1D5C, "M", "ᴥ"), - (0x1D5D, "M", "β"), - (0x1D5E, "M", "γ"), - (0x1D5F, "M", "δ"), - (0x1D60, "M", "φ"), - (0x1D61, "M", "χ"), - (0x1D62, "M", "i"), - (0x1D63, "M", "r"), - (0x1D64, "M", "u"), - (0x1D65, "M", "v"), - (0x1D66, "M", "β"), - (0x1D67, "M", "γ"), - (0x1D68, "M", "ρ"), - (0x1D69, "M", "φ"), - (0x1D6A, "M", "χ"), - (0x1D6B, "V"), - (0x1D78, "M", "н"), - (0x1D79, "V"), - (0x1D9B, "M", "ɒ"), - (0x1D9C, "M", "c"), - (0x1D9D, "M", "ɕ"), - (0x1D9E, "M", "ð"), - (0x1D9F, "M", "ɜ"), - (0x1DA0, "M", "f"), - (0x1DA1, "M", "ɟ"), - (0x1DA2, "M", "ɡ"), - (0x1DA3, "M", "ɥ"), - (0x1DA4, "M", "ɨ"), - (0x1DA5, "M", "ɩ"), - (0x1DA6, "M", "ɪ"), - (0x1DA7, "M", "ᵻ"), - (0x1DA8, "M", "ʝ"), - (0x1DA9, "M", "ɭ"), - (0x1DAA, "M", "ᶅ"), - (0x1DAB, "M", "ʟ"), - (0x1DAC, "M", "ɱ"), - (0x1DAD, "M", "ɰ"), - (0x1DAE, "M", "ɲ"), - (0x1DAF, "M", "ɳ"), - (0x1DB0, "M", "ɴ"), - (0x1DB1, "M", "ɵ"), - (0x1DB2, "M", "ɸ"), - (0x1DB3, "M", "ʂ"), - (0x1DB4, "M", "ʃ"), - (0x1DB5, "M", "ƫ"), - (0x1DB6, "M", "ʉ"), - (0x1DB7, "M", "ʊ"), - (0x1DB8, "M", "ᴜ"), - (0x1DB9, "M", "ʋ"), - (0x1DBA, "M", "ʌ"), - (0x1DBB, "M", "z"), - (0x1DBC, "M", "ʐ"), - (0x1DBD, "M", "ʑ"), - (0x1DBE, "M", "ʒ"), - (0x1DBF, "M", "θ"), - (0x1DC0, "V"), - (0x1E00, "M", "ḁ"), - (0x1E01, "V"), - (0x1E02, "M", "ḃ"), - (0x1E03, "V"), - (0x1E04, "M", "ḅ"), - (0x1E05, "V"), - (0x1E06, "M", "ḇ"), - (0x1E07, "V"), - (0x1E08, "M", "ḉ"), - (0x1E09, "V"), - (0x1E0A, "M", "ḋ"), - (0x1E0B, "V"), - (0x1E0C, "M", "ḍ"), - (0x1E0D, "V"), - (0x1E0E, "M", "ḏ"), - (0x1E0F, "V"), - (0x1E10, "M", "ḑ"), - (0x1E11, "V"), - (0x1E12, "M", "ḓ"), - (0x1E13, "V"), - (0x1E14, "M", "ḕ"), - (0x1E15, "V"), - (0x1E16, "M", "ḗ"), - (0x1E17, "V"), - (0x1E18, "M", "ḙ"), - (0x1E19, "V"), - (0x1E1A, "M", "ḛ"), - (0x1E1B, "V"), - (0x1E1C, "M", "ḝ"), - (0x1E1D, "V"), - (0x1E1E, "M", "ḟ"), - (0x1E1F, "V"), - (0x1E20, "M", "ḡ"), - (0x1E21, "V"), - (0x1E22, "M", "ḣ"), - (0x1E23, "V"), - ] - - -def _seg_17() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E24, "M", "ḥ"), - (0x1E25, "V"), - (0x1E26, "M", "ḧ"), - (0x1E27, "V"), - (0x1E28, "M", "ḩ"), - (0x1E29, "V"), - (0x1E2A, "M", "ḫ"), - (0x1E2B, "V"), - (0x1E2C, "M", "ḭ"), - (0x1E2D, "V"), - (0x1E2E, "M", "ḯ"), - (0x1E2F, "V"), - (0x1E30, "M", "ḱ"), - (0x1E31, "V"), - (0x1E32, "M", "ḳ"), - (0x1E33, "V"), - (0x1E34, "M", "ḵ"), - (0x1E35, "V"), - (0x1E36, "M", "ḷ"), - (0x1E37, "V"), - (0x1E38, "M", "ḹ"), - (0x1E39, "V"), - (0x1E3A, "M", "ḻ"), - (0x1E3B, "V"), - (0x1E3C, "M", "ḽ"), - (0x1E3D, "V"), - (0x1E3E, "M", "ḿ"), - (0x1E3F, "V"), - (0x1E40, "M", "ṁ"), - (0x1E41, "V"), - (0x1E42, "M", "ṃ"), - (0x1E43, "V"), - (0x1E44, "M", "ṅ"), - (0x1E45, "V"), - (0x1E46, "M", "ṇ"), - (0x1E47, "V"), - (0x1E48, "M", "ṉ"), - (0x1E49, "V"), - (0x1E4A, "M", "ṋ"), - (0x1E4B, "V"), - (0x1E4C, "M", "ṍ"), - (0x1E4D, "V"), - (0x1E4E, "M", "ṏ"), - (0x1E4F, "V"), - (0x1E50, "M", "ṑ"), - (0x1E51, 
"V"), - (0x1E52, "M", "ṓ"), - (0x1E53, "V"), - (0x1E54, "M", "ṕ"), - (0x1E55, "V"), - (0x1E56, "M", "ṗ"), - (0x1E57, "V"), - (0x1E58, "M", "ṙ"), - (0x1E59, "V"), - (0x1E5A, "M", "ṛ"), - (0x1E5B, "V"), - (0x1E5C, "M", "ṝ"), - (0x1E5D, "V"), - (0x1E5E, "M", "ṟ"), - (0x1E5F, "V"), - (0x1E60, "M", "ṡ"), - (0x1E61, "V"), - (0x1E62, "M", "ṣ"), - (0x1E63, "V"), - (0x1E64, "M", "ṥ"), - (0x1E65, "V"), - (0x1E66, "M", "ṧ"), - (0x1E67, "V"), - (0x1E68, "M", "ṩ"), - (0x1E69, "V"), - (0x1E6A, "M", "ṫ"), - (0x1E6B, "V"), - (0x1E6C, "M", "ṭ"), - (0x1E6D, "V"), - (0x1E6E, "M", "ṯ"), - (0x1E6F, "V"), - (0x1E70, "M", "ṱ"), - (0x1E71, "V"), - (0x1E72, "M", "ṳ"), - (0x1E73, "V"), - (0x1E74, "M", "ṵ"), - (0x1E75, "V"), - (0x1E76, "M", "ṷ"), - (0x1E77, "V"), - (0x1E78, "M", "ṹ"), - (0x1E79, "V"), - (0x1E7A, "M", "ṻ"), - (0x1E7B, "V"), - (0x1E7C, "M", "ṽ"), - (0x1E7D, "V"), - (0x1E7E, "M", "ṿ"), - (0x1E7F, "V"), - (0x1E80, "M", "ẁ"), - (0x1E81, "V"), - (0x1E82, "M", "ẃ"), - (0x1E83, "V"), - (0x1E84, "M", "ẅ"), - (0x1E85, "V"), - (0x1E86, "M", "ẇ"), - (0x1E87, "V"), - ] - - -def _seg_18() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E88, "M", "ẉ"), - (0x1E89, "V"), - (0x1E8A, "M", "ẋ"), - (0x1E8B, "V"), - (0x1E8C, "M", "ẍ"), - (0x1E8D, "V"), - (0x1E8E, "M", "ẏ"), - (0x1E8F, "V"), - (0x1E90, "M", "ẑ"), - (0x1E91, "V"), - (0x1E92, "M", "ẓ"), - (0x1E93, "V"), - (0x1E94, "M", "ẕ"), - (0x1E95, "V"), - (0x1E9A, "M", "aʾ"), - (0x1E9B, "M", "ṡ"), - (0x1E9C, "V"), - (0x1E9E, "M", "ß"), - (0x1E9F, "V"), - (0x1EA0, "M", "ạ"), - (0x1EA1, "V"), - (0x1EA2, "M", "ả"), - (0x1EA3, "V"), - (0x1EA4, "M", "ấ"), - (0x1EA5, "V"), - (0x1EA6, "M", "ầ"), - (0x1EA7, "V"), - (0x1EA8, "M", "ẩ"), - (0x1EA9, "V"), - (0x1EAA, "M", "ẫ"), - (0x1EAB, "V"), - (0x1EAC, "M", "ậ"), - (0x1EAD, "V"), - (0x1EAE, "M", "ắ"), - (0x1EAF, "V"), - (0x1EB0, "M", "ằ"), - (0x1EB1, "V"), - (0x1EB2, "M", "ẳ"), - (0x1EB3, "V"), - (0x1EB4, "M", "ẵ"), - (0x1EB5, "V"), - (0x1EB6, "M", "ặ"), - (0x1EB7, "V"), - (0x1EB8, "M", "ẹ"), - (0x1EB9, "V"), - (0x1EBA, "M", "ẻ"), - (0x1EBB, "V"), - (0x1EBC, "M", "ẽ"), - (0x1EBD, "V"), - (0x1EBE, "M", "ế"), - (0x1EBF, "V"), - (0x1EC0, "M", "ề"), - (0x1EC1, "V"), - (0x1EC2, "M", "ể"), - (0x1EC3, "V"), - (0x1EC4, "M", "ễ"), - (0x1EC5, "V"), - (0x1EC6, "M", "ệ"), - (0x1EC7, "V"), - (0x1EC8, "M", "ỉ"), - (0x1EC9, "V"), - (0x1ECA, "M", "ị"), - (0x1ECB, "V"), - (0x1ECC, "M", "ọ"), - (0x1ECD, "V"), - (0x1ECE, "M", "ỏ"), - (0x1ECF, "V"), - (0x1ED0, "M", "ố"), - (0x1ED1, "V"), - (0x1ED2, "M", "ồ"), - (0x1ED3, "V"), - (0x1ED4, "M", "ổ"), - (0x1ED5, "V"), - (0x1ED6, "M", "ỗ"), - (0x1ED7, "V"), - (0x1ED8, "M", "ộ"), - (0x1ED9, "V"), - (0x1EDA, "M", "ớ"), - (0x1EDB, "V"), - (0x1EDC, "M", "ờ"), - (0x1EDD, "V"), - (0x1EDE, "M", "ở"), - (0x1EDF, "V"), - (0x1EE0, "M", "ỡ"), - (0x1EE1, "V"), - (0x1EE2, "M", "ợ"), - (0x1EE3, "V"), - (0x1EE4, "M", "ụ"), - (0x1EE5, "V"), - (0x1EE6, "M", "ủ"), - (0x1EE7, "V"), - (0x1EE8, "M", "ứ"), - (0x1EE9, "V"), - (0x1EEA, "M", "ừ"), - (0x1EEB, "V"), - (0x1EEC, "M", "ử"), - (0x1EED, "V"), - (0x1EEE, "M", "ữ"), - (0x1EEF, "V"), - (0x1EF0, "M", "ự"), - ] - - -def _seg_19() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EF1, "V"), - (0x1EF2, "M", "ỳ"), - (0x1EF3, "V"), - (0x1EF4, "M", "ỵ"), - (0x1EF5, "V"), - (0x1EF6, "M", "ỷ"), - (0x1EF7, "V"), - (0x1EF8, "M", "ỹ"), - (0x1EF9, "V"), - (0x1EFA, "M", "ỻ"), - (0x1EFB, "V"), - (0x1EFC, "M", "ỽ"), - (0x1EFD, "V"), - (0x1EFE, "M", "ỿ"), - (0x1EFF, "V"), - (0x1F08, "M", "ἀ"), - (0x1F09, "M", "ἁ"), - (0x1F0A, "M", "ἂ"), - 
(0x1F0B, "M", "ἃ"), - (0x1F0C, "M", "ἄ"), - (0x1F0D, "M", "ἅ"), - (0x1F0E, "M", "ἆ"), - (0x1F0F, "M", "ἇ"), - (0x1F10, "V"), - (0x1F16, "X"), - (0x1F18, "M", "ἐ"), - (0x1F19, "M", "ἑ"), - (0x1F1A, "M", "ἒ"), - (0x1F1B, "M", "ἓ"), - (0x1F1C, "M", "ἔ"), - (0x1F1D, "M", "ἕ"), - (0x1F1E, "X"), - (0x1F20, "V"), - (0x1F28, "M", "ἠ"), - (0x1F29, "M", "ἡ"), - (0x1F2A, "M", "ἢ"), - (0x1F2B, "M", "ἣ"), - (0x1F2C, "M", "ἤ"), - (0x1F2D, "M", "ἥ"), - (0x1F2E, "M", "ἦ"), - (0x1F2F, "M", "ἧ"), - (0x1F30, "V"), - (0x1F38, "M", "ἰ"), - (0x1F39, "M", "ἱ"), - (0x1F3A, "M", "ἲ"), - (0x1F3B, "M", "ἳ"), - (0x1F3C, "M", "ἴ"), - (0x1F3D, "M", "ἵ"), - (0x1F3E, "M", "ἶ"), - (0x1F3F, "M", "ἷ"), - (0x1F40, "V"), - (0x1F46, "X"), - (0x1F48, "M", "ὀ"), - (0x1F49, "M", "ὁ"), - (0x1F4A, "M", "ὂ"), - (0x1F4B, "M", "ὃ"), - (0x1F4C, "M", "ὄ"), - (0x1F4D, "M", "ὅ"), - (0x1F4E, "X"), - (0x1F50, "V"), - (0x1F58, "X"), - (0x1F59, "M", "ὑ"), - (0x1F5A, "X"), - (0x1F5B, "M", "ὓ"), - (0x1F5C, "X"), - (0x1F5D, "M", "ὕ"), - (0x1F5E, "X"), - (0x1F5F, "M", "ὗ"), - (0x1F60, "V"), - (0x1F68, "M", "ὠ"), - (0x1F69, "M", "ὡ"), - (0x1F6A, "M", "ὢ"), - (0x1F6B, "M", "ὣ"), - (0x1F6C, "M", "ὤ"), - (0x1F6D, "M", "ὥ"), - (0x1F6E, "M", "ὦ"), - (0x1F6F, "M", "ὧ"), - (0x1F70, "V"), - (0x1F71, "M", "ά"), - (0x1F72, "V"), - (0x1F73, "M", "έ"), - (0x1F74, "V"), - (0x1F75, "M", "ή"), - (0x1F76, "V"), - (0x1F77, "M", "ί"), - (0x1F78, "V"), - (0x1F79, "M", "ό"), - (0x1F7A, "V"), - (0x1F7B, "M", "ύ"), - (0x1F7C, "V"), - (0x1F7D, "M", "ώ"), - (0x1F7E, "X"), - (0x1F80, "M", "ἀι"), - (0x1F81, "M", "ἁι"), - (0x1F82, "M", "ἂι"), - (0x1F83, "M", "ἃι"), - (0x1F84, "M", "ἄι"), - (0x1F85, "M", "ἅι"), - (0x1F86, "M", "ἆι"), - (0x1F87, "M", "ἇι"), - ] - - -def _seg_20() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F88, "M", "ἀι"), - (0x1F89, "M", "ἁι"), - (0x1F8A, "M", "ἂι"), - (0x1F8B, "M", "ἃι"), - (0x1F8C, "M", "ἄι"), - (0x1F8D, "M", "ἅι"), - (0x1F8E, "M", "ἆι"), - (0x1F8F, "M", "ἇι"), - (0x1F90, "M", "ἠι"), - (0x1F91, "M", "ἡι"), - (0x1F92, "M", "ἢι"), - (0x1F93, "M", "ἣι"), - (0x1F94, "M", "ἤι"), - (0x1F95, "M", "ἥι"), - (0x1F96, "M", "ἦι"), - (0x1F97, "M", "ἧι"), - (0x1F98, "M", "ἠι"), - (0x1F99, "M", "ἡι"), - (0x1F9A, "M", "ἢι"), - (0x1F9B, "M", "ἣι"), - (0x1F9C, "M", "ἤι"), - (0x1F9D, "M", "ἥι"), - (0x1F9E, "M", "ἦι"), - (0x1F9F, "M", "ἧι"), - (0x1FA0, "M", "ὠι"), - (0x1FA1, "M", "ὡι"), - (0x1FA2, "M", "ὢι"), - (0x1FA3, "M", "ὣι"), - (0x1FA4, "M", "ὤι"), - (0x1FA5, "M", "ὥι"), - (0x1FA6, "M", "ὦι"), - (0x1FA7, "M", "ὧι"), - (0x1FA8, "M", "ὠι"), - (0x1FA9, "M", "ὡι"), - (0x1FAA, "M", "ὢι"), - (0x1FAB, "M", "ὣι"), - (0x1FAC, "M", "ὤι"), - (0x1FAD, "M", "ὥι"), - (0x1FAE, "M", "ὦι"), - (0x1FAF, "M", "ὧι"), - (0x1FB0, "V"), - (0x1FB2, "M", "ὰι"), - (0x1FB3, "M", "αι"), - (0x1FB4, "M", "άι"), - (0x1FB5, "X"), - (0x1FB6, "V"), - (0x1FB7, "M", "ᾶι"), - (0x1FB8, "M", "ᾰ"), - (0x1FB9, "M", "ᾱ"), - (0x1FBA, "M", "ὰ"), - (0x1FBB, "M", "ά"), - (0x1FBC, "M", "αι"), - (0x1FBD, "3", " ̓"), - (0x1FBE, "M", "ι"), - (0x1FBF, "3", " ̓"), - (0x1FC0, "3", " ͂"), - (0x1FC1, "3", " ̈͂"), - (0x1FC2, "M", "ὴι"), - (0x1FC3, "M", "ηι"), - (0x1FC4, "M", "ήι"), - (0x1FC5, "X"), - (0x1FC6, "V"), - (0x1FC7, "M", "ῆι"), - (0x1FC8, "M", "ὲ"), - (0x1FC9, "M", "έ"), - (0x1FCA, "M", "ὴ"), - (0x1FCB, "M", "ή"), - (0x1FCC, "M", "ηι"), - (0x1FCD, "3", " ̓̀"), - (0x1FCE, "3", " ̓́"), - (0x1FCF, "3", " ̓͂"), - (0x1FD0, "V"), - (0x1FD3, "M", "ΐ"), - (0x1FD4, "X"), - (0x1FD6, "V"), - (0x1FD8, "M", "ῐ"), - (0x1FD9, "M", "ῑ"), - (0x1FDA, "M", "ὶ"), - (0x1FDB, "M", "ί"), - 
(0x1FDC, "X"), - (0x1FDD, "3", " ̔̀"), - (0x1FDE, "3", " ̔́"), - (0x1FDF, "3", " ̔͂"), - (0x1FE0, "V"), - (0x1FE3, "M", "ΰ"), - (0x1FE4, "V"), - (0x1FE8, "M", "ῠ"), - (0x1FE9, "M", "ῡ"), - (0x1FEA, "M", "ὺ"), - (0x1FEB, "M", "ύ"), - (0x1FEC, "M", "ῥ"), - (0x1FED, "3", " ̈̀"), - (0x1FEE, "3", " ̈́"), - (0x1FEF, "3", "`"), - (0x1FF0, "X"), - (0x1FF2, "M", "ὼι"), - (0x1FF3, "M", "ωι"), - (0x1FF4, "M", "ώι"), - (0x1FF5, "X"), - (0x1FF6, "V"), - ] - - -def _seg_21() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FF7, "M", "ῶι"), - (0x1FF8, "M", "ὸ"), - (0x1FF9, "M", "ό"), - (0x1FFA, "M", "ὼ"), - (0x1FFB, "M", "ώ"), - (0x1FFC, "M", "ωι"), - (0x1FFD, "3", " ́"), - (0x1FFE, "3", " ̔"), - (0x1FFF, "X"), - (0x2000, "3", " "), - (0x200B, "I"), - (0x200C, "D", ""), - (0x200E, "X"), - (0x2010, "V"), - (0x2011, "M", "‐"), - (0x2012, "V"), - (0x2017, "3", " ̳"), - (0x2018, "V"), - (0x2024, "X"), - (0x2027, "V"), - (0x2028, "X"), - (0x202F, "3", " "), - (0x2030, "V"), - (0x2033, "M", "′′"), - (0x2034, "M", "′′′"), - (0x2035, "V"), - (0x2036, "M", "‵‵"), - (0x2037, "M", "‵‵‵"), - (0x2038, "V"), - (0x203C, "3", "!!"), - (0x203D, "V"), - (0x203E, "3", " ̅"), - (0x203F, "V"), - (0x2047, "3", "??"), - (0x2048, "3", "?!"), - (0x2049, "3", "!?"), - (0x204A, "V"), - (0x2057, "M", "′′′′"), - (0x2058, "V"), - (0x205F, "3", " "), - (0x2060, "I"), - (0x2061, "X"), - (0x2064, "I"), - (0x2065, "X"), - (0x2070, "M", "0"), - (0x2071, "M", "i"), - (0x2072, "X"), - (0x2074, "M", "4"), - (0x2075, "M", "5"), - (0x2076, "M", "6"), - (0x2077, "M", "7"), - (0x2078, "M", "8"), - (0x2079, "M", "9"), - (0x207A, "3", "+"), - (0x207B, "M", "−"), - (0x207C, "3", "="), - (0x207D, "3", "("), - (0x207E, "3", ")"), - (0x207F, "M", "n"), - (0x2080, "M", "0"), - (0x2081, "M", "1"), - (0x2082, "M", "2"), - (0x2083, "M", "3"), - (0x2084, "M", "4"), - (0x2085, "M", "5"), - (0x2086, "M", "6"), - (0x2087, "M", "7"), - (0x2088, "M", "8"), - (0x2089, "M", "9"), - (0x208A, "3", "+"), - (0x208B, "M", "−"), - (0x208C, "3", "="), - (0x208D, "3", "("), - (0x208E, "3", ")"), - (0x208F, "X"), - (0x2090, "M", "a"), - (0x2091, "M", "e"), - (0x2092, "M", "o"), - (0x2093, "M", "x"), - (0x2094, "M", "ə"), - (0x2095, "M", "h"), - (0x2096, "M", "k"), - (0x2097, "M", "l"), - (0x2098, "M", "m"), - (0x2099, "M", "n"), - (0x209A, "M", "p"), - (0x209B, "M", "s"), - (0x209C, "M", "t"), - (0x209D, "X"), - (0x20A0, "V"), - (0x20A8, "M", "rs"), - (0x20A9, "V"), - (0x20C1, "X"), - (0x20D0, "V"), - (0x20F1, "X"), - (0x2100, "3", "a/c"), - (0x2101, "3", "a/s"), - (0x2102, "M", "c"), - (0x2103, "M", "°c"), - (0x2104, "V"), - ] - - -def _seg_22() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2105, "3", "c/o"), - (0x2106, "3", "c/u"), - (0x2107, "M", "ɛ"), - (0x2108, "V"), - (0x2109, "M", "°f"), - (0x210A, "M", "g"), - (0x210B, "M", "h"), - (0x210F, "M", "ħ"), - (0x2110, "M", "i"), - (0x2112, "M", "l"), - (0x2114, "V"), - (0x2115, "M", "n"), - (0x2116, "M", "no"), - (0x2117, "V"), - (0x2119, "M", "p"), - (0x211A, "M", "q"), - (0x211B, "M", "r"), - (0x211E, "V"), - (0x2120, "M", "sm"), - (0x2121, "M", "tel"), - (0x2122, "M", "tm"), - (0x2123, "V"), - (0x2124, "M", "z"), - (0x2125, "V"), - (0x2126, "M", "ω"), - (0x2127, "V"), - (0x2128, "M", "z"), - (0x2129, "V"), - (0x212A, "M", "k"), - (0x212B, "M", "å"), - (0x212C, "M", "b"), - (0x212D, "M", "c"), - (0x212E, "V"), - (0x212F, "M", "e"), - (0x2131, "M", "f"), - (0x2132, "X"), - (0x2133, "M", "m"), - (0x2134, "M", "o"), - (0x2135, "M", "א"), - (0x2136, "M", "ב"), - (0x2137, 
"M", "ג"), - (0x2138, "M", "ד"), - (0x2139, "M", "i"), - (0x213A, "V"), - (0x213B, "M", "fax"), - (0x213C, "M", "π"), - (0x213D, "M", "γ"), - (0x213F, "M", "π"), - (0x2140, "M", "∑"), - (0x2141, "V"), - (0x2145, "M", "d"), - (0x2147, "M", "e"), - (0x2148, "M", "i"), - (0x2149, "M", "j"), - (0x214A, "V"), - (0x2150, "M", "1⁄7"), - (0x2151, "M", "1⁄9"), - (0x2152, "M", "1⁄10"), - (0x2153, "M", "1⁄3"), - (0x2154, "M", "2⁄3"), - (0x2155, "M", "1⁄5"), - (0x2156, "M", "2⁄5"), - (0x2157, "M", "3⁄5"), - (0x2158, "M", "4⁄5"), - (0x2159, "M", "1⁄6"), - (0x215A, "M", "5⁄6"), - (0x215B, "M", "1⁄8"), - (0x215C, "M", "3⁄8"), - (0x215D, "M", "5⁄8"), - (0x215E, "M", "7⁄8"), - (0x215F, "M", "1⁄"), - (0x2160, "M", "i"), - (0x2161, "M", "ii"), - (0x2162, "M", "iii"), - (0x2163, "M", "iv"), - (0x2164, "M", "v"), - (0x2165, "M", "vi"), - (0x2166, "M", "vii"), - (0x2167, "M", "viii"), - (0x2168, "M", "ix"), - (0x2169, "M", "x"), - (0x216A, "M", "xi"), - (0x216B, "M", "xii"), - (0x216C, "M", "l"), - (0x216D, "M", "c"), - (0x216E, "M", "d"), - (0x216F, "M", "m"), - (0x2170, "M", "i"), - (0x2171, "M", "ii"), - (0x2172, "M", "iii"), - (0x2173, "M", "iv"), - (0x2174, "M", "v"), - (0x2175, "M", "vi"), - (0x2176, "M", "vii"), - (0x2177, "M", "viii"), - (0x2178, "M", "ix"), - (0x2179, "M", "x"), - (0x217A, "M", "xi"), - (0x217B, "M", "xii"), - (0x217C, "M", "l"), - ] - - -def _seg_23() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x217D, "M", "c"), - (0x217E, "M", "d"), - (0x217F, "M", "m"), - (0x2180, "V"), - (0x2183, "X"), - (0x2184, "V"), - (0x2189, "M", "0⁄3"), - (0x218A, "V"), - (0x218C, "X"), - (0x2190, "V"), - (0x222C, "M", "∫∫"), - (0x222D, "M", "∫∫∫"), - (0x222E, "V"), - (0x222F, "M", "∮∮"), - (0x2230, "M", "∮∮∮"), - (0x2231, "V"), - (0x2329, "M", "〈"), - (0x232A, "M", "〉"), - (0x232B, "V"), - (0x2427, "X"), - (0x2440, "V"), - (0x244B, "X"), - (0x2460, "M", "1"), - (0x2461, "M", "2"), - (0x2462, "M", "3"), - (0x2463, "M", "4"), - (0x2464, "M", "5"), - (0x2465, "M", "6"), - (0x2466, "M", "7"), - (0x2467, "M", "8"), - (0x2468, "M", "9"), - (0x2469, "M", "10"), - (0x246A, "M", "11"), - (0x246B, "M", "12"), - (0x246C, "M", "13"), - (0x246D, "M", "14"), - (0x246E, "M", "15"), - (0x246F, "M", "16"), - (0x2470, "M", "17"), - (0x2471, "M", "18"), - (0x2472, "M", "19"), - (0x2473, "M", "20"), - (0x2474, "3", "(1)"), - (0x2475, "3", "(2)"), - (0x2476, "3", "(3)"), - (0x2477, "3", "(4)"), - (0x2478, "3", "(5)"), - (0x2479, "3", "(6)"), - (0x247A, "3", "(7)"), - (0x247B, "3", "(8)"), - (0x247C, "3", "(9)"), - (0x247D, "3", "(10)"), - (0x247E, "3", "(11)"), - (0x247F, "3", "(12)"), - (0x2480, "3", "(13)"), - (0x2481, "3", "(14)"), - (0x2482, "3", "(15)"), - (0x2483, "3", "(16)"), - (0x2484, "3", "(17)"), - (0x2485, "3", "(18)"), - (0x2486, "3", "(19)"), - (0x2487, "3", "(20)"), - (0x2488, "X"), - (0x249C, "3", "(a)"), - (0x249D, "3", "(b)"), - (0x249E, "3", "(c)"), - (0x249F, "3", "(d)"), - (0x24A0, "3", "(e)"), - (0x24A1, "3", "(f)"), - (0x24A2, "3", "(g)"), - (0x24A3, "3", "(h)"), - (0x24A4, "3", "(i)"), - (0x24A5, "3", "(j)"), - (0x24A6, "3", "(k)"), - (0x24A7, "3", "(l)"), - (0x24A8, "3", "(m)"), - (0x24A9, "3", "(n)"), - (0x24AA, "3", "(o)"), - (0x24AB, "3", "(p)"), - (0x24AC, "3", "(q)"), - (0x24AD, "3", "(r)"), - (0x24AE, "3", "(s)"), - (0x24AF, "3", "(t)"), - (0x24B0, "3", "(u)"), - (0x24B1, "3", "(v)"), - (0x24B2, "3", "(w)"), - (0x24B3, "3", "(x)"), - (0x24B4, "3", "(y)"), - (0x24B5, "3", "(z)"), - (0x24B6, "M", "a"), - (0x24B7, "M", "b"), - (0x24B8, "M", "c"), - (0x24B9, "M", "d"), - 
(0x24BA, "M", "e"), - (0x24BB, "M", "f"), - (0x24BC, "M", "g"), - (0x24BD, "M", "h"), - (0x24BE, "M", "i"), - (0x24BF, "M", "j"), - (0x24C0, "M", "k"), - ] - - -def _seg_24() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x24C1, "M", "l"), - (0x24C2, "M", "m"), - (0x24C3, "M", "n"), - (0x24C4, "M", "o"), - (0x24C5, "M", "p"), - (0x24C6, "M", "q"), - (0x24C7, "M", "r"), - (0x24C8, "M", "s"), - (0x24C9, "M", "t"), - (0x24CA, "M", "u"), - (0x24CB, "M", "v"), - (0x24CC, "M", "w"), - (0x24CD, "M", "x"), - (0x24CE, "M", "y"), - (0x24CF, "M", "z"), - (0x24D0, "M", "a"), - (0x24D1, "M", "b"), - (0x24D2, "M", "c"), - (0x24D3, "M", "d"), - (0x24D4, "M", "e"), - (0x24D5, "M", "f"), - (0x24D6, "M", "g"), - (0x24D7, "M", "h"), - (0x24D8, "M", "i"), - (0x24D9, "M", "j"), - (0x24DA, "M", "k"), - (0x24DB, "M", "l"), - (0x24DC, "M", "m"), - (0x24DD, "M", "n"), - (0x24DE, "M", "o"), - (0x24DF, "M", "p"), - (0x24E0, "M", "q"), - (0x24E1, "M", "r"), - (0x24E2, "M", "s"), - (0x24E3, "M", "t"), - (0x24E4, "M", "u"), - (0x24E5, "M", "v"), - (0x24E6, "M", "w"), - (0x24E7, "M", "x"), - (0x24E8, "M", "y"), - (0x24E9, "M", "z"), - (0x24EA, "M", "0"), - (0x24EB, "V"), - (0x2A0C, "M", "∫∫∫∫"), - (0x2A0D, "V"), - (0x2A74, "3", "::="), - (0x2A75, "3", "=="), - (0x2A76, "3", "==="), - (0x2A77, "V"), - (0x2ADC, "M", "⫝̸"), - (0x2ADD, "V"), - (0x2B74, "X"), - (0x2B76, "V"), - (0x2B96, "X"), - (0x2B97, "V"), - (0x2C00, "M", "ⰰ"), - (0x2C01, "M", "ⰱ"), - (0x2C02, "M", "ⰲ"), - (0x2C03, "M", "ⰳ"), - (0x2C04, "M", "ⰴ"), - (0x2C05, "M", "ⰵ"), - (0x2C06, "M", "ⰶ"), - (0x2C07, "M", "ⰷ"), - (0x2C08, "M", "ⰸ"), - (0x2C09, "M", "ⰹ"), - (0x2C0A, "M", "ⰺ"), - (0x2C0B, "M", "ⰻ"), - (0x2C0C, "M", "ⰼ"), - (0x2C0D, "M", "ⰽ"), - (0x2C0E, "M", "ⰾ"), - (0x2C0F, "M", "ⰿ"), - (0x2C10, "M", "ⱀ"), - (0x2C11, "M", "ⱁ"), - (0x2C12, "M", "ⱂ"), - (0x2C13, "M", "ⱃ"), - (0x2C14, "M", "ⱄ"), - (0x2C15, "M", "ⱅ"), - (0x2C16, "M", "ⱆ"), - (0x2C17, "M", "ⱇ"), - (0x2C18, "M", "ⱈ"), - (0x2C19, "M", "ⱉ"), - (0x2C1A, "M", "ⱊ"), - (0x2C1B, "M", "ⱋ"), - (0x2C1C, "M", "ⱌ"), - (0x2C1D, "M", "ⱍ"), - (0x2C1E, "M", "ⱎ"), - (0x2C1F, "M", "ⱏ"), - (0x2C20, "M", "ⱐ"), - (0x2C21, "M", "ⱑ"), - (0x2C22, "M", "ⱒ"), - (0x2C23, "M", "ⱓ"), - (0x2C24, "M", "ⱔ"), - (0x2C25, "M", "ⱕ"), - (0x2C26, "M", "ⱖ"), - (0x2C27, "M", "ⱗ"), - (0x2C28, "M", "ⱘ"), - (0x2C29, "M", "ⱙ"), - (0x2C2A, "M", "ⱚ"), - (0x2C2B, "M", "ⱛ"), - (0x2C2C, "M", "ⱜ"), - ] - - -def _seg_25() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2C2D, "M", "ⱝ"), - (0x2C2E, "M", "ⱞ"), - (0x2C2F, "M", "ⱟ"), - (0x2C30, "V"), - (0x2C60, "M", "ⱡ"), - (0x2C61, "V"), - (0x2C62, "M", "ɫ"), - (0x2C63, "M", "ᵽ"), - (0x2C64, "M", "ɽ"), - (0x2C65, "V"), - (0x2C67, "M", "ⱨ"), - (0x2C68, "V"), - (0x2C69, "M", "ⱪ"), - (0x2C6A, "V"), - (0x2C6B, "M", "ⱬ"), - (0x2C6C, "V"), - (0x2C6D, "M", "ɑ"), - (0x2C6E, "M", "ɱ"), - (0x2C6F, "M", "ɐ"), - (0x2C70, "M", "ɒ"), - (0x2C71, "V"), - (0x2C72, "M", "ⱳ"), - (0x2C73, "V"), - (0x2C75, "M", "ⱶ"), - (0x2C76, "V"), - (0x2C7C, "M", "j"), - (0x2C7D, "M", "v"), - (0x2C7E, "M", "ȿ"), - (0x2C7F, "M", "ɀ"), - (0x2C80, "M", "ⲁ"), - (0x2C81, "V"), - (0x2C82, "M", "ⲃ"), - (0x2C83, "V"), - (0x2C84, "M", "ⲅ"), - (0x2C85, "V"), - (0x2C86, "M", "ⲇ"), - (0x2C87, "V"), - (0x2C88, "M", "ⲉ"), - (0x2C89, "V"), - (0x2C8A, "M", "ⲋ"), - (0x2C8B, "V"), - (0x2C8C, "M", "ⲍ"), - (0x2C8D, "V"), - (0x2C8E, "M", "ⲏ"), - (0x2C8F, "V"), - (0x2C90, "M", "ⲑ"), - (0x2C91, "V"), - (0x2C92, "M", "ⲓ"), - (0x2C93, "V"), - (0x2C94, "M", "ⲕ"), - (0x2C95, "V"), - (0x2C96, "M", "ⲗ"), - (0x2C97, 
"V"), - (0x2C98, "M", "ⲙ"), - (0x2C99, "V"), - (0x2C9A, "M", "ⲛ"), - (0x2C9B, "V"), - (0x2C9C, "M", "ⲝ"), - (0x2C9D, "V"), - (0x2C9E, "M", "ⲟ"), - (0x2C9F, "V"), - (0x2CA0, "M", "ⲡ"), - (0x2CA1, "V"), - (0x2CA2, "M", "ⲣ"), - (0x2CA3, "V"), - (0x2CA4, "M", "ⲥ"), - (0x2CA5, "V"), - (0x2CA6, "M", "ⲧ"), - (0x2CA7, "V"), - (0x2CA8, "M", "ⲩ"), - (0x2CA9, "V"), - (0x2CAA, "M", "ⲫ"), - (0x2CAB, "V"), - (0x2CAC, "M", "ⲭ"), - (0x2CAD, "V"), - (0x2CAE, "M", "ⲯ"), - (0x2CAF, "V"), - (0x2CB0, "M", "ⲱ"), - (0x2CB1, "V"), - (0x2CB2, "M", "ⲳ"), - (0x2CB3, "V"), - (0x2CB4, "M", "ⲵ"), - (0x2CB5, "V"), - (0x2CB6, "M", "ⲷ"), - (0x2CB7, "V"), - (0x2CB8, "M", "ⲹ"), - (0x2CB9, "V"), - (0x2CBA, "M", "ⲻ"), - (0x2CBB, "V"), - (0x2CBC, "M", "ⲽ"), - (0x2CBD, "V"), - (0x2CBE, "M", "ⲿ"), - (0x2CBF, "V"), - (0x2CC0, "M", "ⳁ"), - (0x2CC1, "V"), - (0x2CC2, "M", "ⳃ"), - (0x2CC3, "V"), - (0x2CC4, "M", "ⳅ"), - (0x2CC5, "V"), - (0x2CC6, "M", "ⳇ"), - ] - - -def _seg_26() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2CC7, "V"), - (0x2CC8, "M", "ⳉ"), - (0x2CC9, "V"), - (0x2CCA, "M", "ⳋ"), - (0x2CCB, "V"), - (0x2CCC, "M", "ⳍ"), - (0x2CCD, "V"), - (0x2CCE, "M", "ⳏ"), - (0x2CCF, "V"), - (0x2CD0, "M", "ⳑ"), - (0x2CD1, "V"), - (0x2CD2, "M", "ⳓ"), - (0x2CD3, "V"), - (0x2CD4, "M", "ⳕ"), - (0x2CD5, "V"), - (0x2CD6, "M", "ⳗ"), - (0x2CD7, "V"), - (0x2CD8, "M", "ⳙ"), - (0x2CD9, "V"), - (0x2CDA, "M", "ⳛ"), - (0x2CDB, "V"), - (0x2CDC, "M", "ⳝ"), - (0x2CDD, "V"), - (0x2CDE, "M", "ⳟ"), - (0x2CDF, "V"), - (0x2CE0, "M", "ⳡ"), - (0x2CE1, "V"), - (0x2CE2, "M", "ⳣ"), - (0x2CE3, "V"), - (0x2CEB, "M", "ⳬ"), - (0x2CEC, "V"), - (0x2CED, "M", "ⳮ"), - (0x2CEE, "V"), - (0x2CF2, "M", "ⳳ"), - (0x2CF3, "V"), - (0x2CF4, "X"), - (0x2CF9, "V"), - (0x2D26, "X"), - (0x2D27, "V"), - (0x2D28, "X"), - (0x2D2D, "V"), - (0x2D2E, "X"), - (0x2D30, "V"), - (0x2D68, "X"), - (0x2D6F, "M", "ⵡ"), - (0x2D70, "V"), - (0x2D71, "X"), - (0x2D7F, "V"), - (0x2D97, "X"), - (0x2DA0, "V"), - (0x2DA7, "X"), - (0x2DA8, "V"), - (0x2DAF, "X"), - (0x2DB0, "V"), - (0x2DB7, "X"), - (0x2DB8, "V"), - (0x2DBF, "X"), - (0x2DC0, "V"), - (0x2DC7, "X"), - (0x2DC8, "V"), - (0x2DCF, "X"), - (0x2DD0, "V"), - (0x2DD7, "X"), - (0x2DD8, "V"), - (0x2DDF, "X"), - (0x2DE0, "V"), - (0x2E5E, "X"), - (0x2E80, "V"), - (0x2E9A, "X"), - (0x2E9B, "V"), - (0x2E9F, "M", "母"), - (0x2EA0, "V"), - (0x2EF3, "M", "龟"), - (0x2EF4, "X"), - (0x2F00, "M", "一"), - (0x2F01, "M", "丨"), - (0x2F02, "M", "丶"), - (0x2F03, "M", "丿"), - (0x2F04, "M", "乙"), - (0x2F05, "M", "亅"), - (0x2F06, "M", "二"), - (0x2F07, "M", "亠"), - (0x2F08, "M", "人"), - (0x2F09, "M", "儿"), - (0x2F0A, "M", "入"), - (0x2F0B, "M", "八"), - (0x2F0C, "M", "冂"), - (0x2F0D, "M", "冖"), - (0x2F0E, "M", "冫"), - (0x2F0F, "M", "几"), - (0x2F10, "M", "凵"), - (0x2F11, "M", "刀"), - (0x2F12, "M", "力"), - (0x2F13, "M", "勹"), - (0x2F14, "M", "匕"), - (0x2F15, "M", "匚"), - (0x2F16, "M", "匸"), - (0x2F17, "M", "十"), - (0x2F18, "M", "卜"), - (0x2F19, "M", "卩"), - ] - - -def _seg_27() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F1A, "M", "厂"), - (0x2F1B, "M", "厶"), - (0x2F1C, "M", "又"), - (0x2F1D, "M", "口"), - (0x2F1E, "M", "囗"), - (0x2F1F, "M", "土"), - (0x2F20, "M", "士"), - (0x2F21, "M", "夂"), - (0x2F22, "M", "夊"), - (0x2F23, "M", "夕"), - (0x2F24, "M", "大"), - (0x2F25, "M", "女"), - (0x2F26, "M", "子"), - (0x2F27, "M", "宀"), - (0x2F28, "M", "寸"), - (0x2F29, "M", "小"), - (0x2F2A, "M", "尢"), - (0x2F2B, "M", "尸"), - (0x2F2C, "M", "屮"), - (0x2F2D, "M", "山"), - (0x2F2E, "M", "巛"), - (0x2F2F, "M", "工"), - (0x2F30, "M", "己"), - (0x2F31, "M", 
"巾"), - (0x2F32, "M", "干"), - (0x2F33, "M", "幺"), - (0x2F34, "M", "广"), - (0x2F35, "M", "廴"), - (0x2F36, "M", "廾"), - (0x2F37, "M", "弋"), - (0x2F38, "M", "弓"), - (0x2F39, "M", "彐"), - (0x2F3A, "M", "彡"), - (0x2F3B, "M", "彳"), - (0x2F3C, "M", "心"), - (0x2F3D, "M", "戈"), - (0x2F3E, "M", "戶"), - (0x2F3F, "M", "手"), - (0x2F40, "M", "支"), - (0x2F41, "M", "攴"), - (0x2F42, "M", "文"), - (0x2F43, "M", "斗"), - (0x2F44, "M", "斤"), - (0x2F45, "M", "方"), - (0x2F46, "M", "无"), - (0x2F47, "M", "日"), - (0x2F48, "M", "曰"), - (0x2F49, "M", "月"), - (0x2F4A, "M", "木"), - (0x2F4B, "M", "欠"), - (0x2F4C, "M", "止"), - (0x2F4D, "M", "歹"), - (0x2F4E, "M", "殳"), - (0x2F4F, "M", "毋"), - (0x2F50, "M", "比"), - (0x2F51, "M", "毛"), - (0x2F52, "M", "氏"), - (0x2F53, "M", "气"), - (0x2F54, "M", "水"), - (0x2F55, "M", "火"), - (0x2F56, "M", "爪"), - (0x2F57, "M", "父"), - (0x2F58, "M", "爻"), - (0x2F59, "M", "爿"), - (0x2F5A, "M", "片"), - (0x2F5B, "M", "牙"), - (0x2F5C, "M", "牛"), - (0x2F5D, "M", "犬"), - (0x2F5E, "M", "玄"), - (0x2F5F, "M", "玉"), - (0x2F60, "M", "瓜"), - (0x2F61, "M", "瓦"), - (0x2F62, "M", "甘"), - (0x2F63, "M", "生"), - (0x2F64, "M", "用"), - (0x2F65, "M", "田"), - (0x2F66, "M", "疋"), - (0x2F67, "M", "疒"), - (0x2F68, "M", "癶"), - (0x2F69, "M", "白"), - (0x2F6A, "M", "皮"), - (0x2F6B, "M", "皿"), - (0x2F6C, "M", "目"), - (0x2F6D, "M", "矛"), - (0x2F6E, "M", "矢"), - (0x2F6F, "M", "石"), - (0x2F70, "M", "示"), - (0x2F71, "M", "禸"), - (0x2F72, "M", "禾"), - (0x2F73, "M", "穴"), - (0x2F74, "M", "立"), - (0x2F75, "M", "竹"), - (0x2F76, "M", "米"), - (0x2F77, "M", "糸"), - (0x2F78, "M", "缶"), - (0x2F79, "M", "网"), - (0x2F7A, "M", "羊"), - (0x2F7B, "M", "羽"), - (0x2F7C, "M", "老"), - (0x2F7D, "M", "而"), - ] - - -def _seg_28() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F7E, "M", "耒"), - (0x2F7F, "M", "耳"), - (0x2F80, "M", "聿"), - (0x2F81, "M", "肉"), - (0x2F82, "M", "臣"), - (0x2F83, "M", "自"), - (0x2F84, "M", "至"), - (0x2F85, "M", "臼"), - (0x2F86, "M", "舌"), - (0x2F87, "M", "舛"), - (0x2F88, "M", "舟"), - (0x2F89, "M", "艮"), - (0x2F8A, "M", "色"), - (0x2F8B, "M", "艸"), - (0x2F8C, "M", "虍"), - (0x2F8D, "M", "虫"), - (0x2F8E, "M", "血"), - (0x2F8F, "M", "行"), - (0x2F90, "M", "衣"), - (0x2F91, "M", "襾"), - (0x2F92, "M", "見"), - (0x2F93, "M", "角"), - (0x2F94, "M", "言"), - (0x2F95, "M", "谷"), - (0x2F96, "M", "豆"), - (0x2F97, "M", "豕"), - (0x2F98, "M", "豸"), - (0x2F99, "M", "貝"), - (0x2F9A, "M", "赤"), - (0x2F9B, "M", "走"), - (0x2F9C, "M", "足"), - (0x2F9D, "M", "身"), - (0x2F9E, "M", "車"), - (0x2F9F, "M", "辛"), - (0x2FA0, "M", "辰"), - (0x2FA1, "M", "辵"), - (0x2FA2, "M", "邑"), - (0x2FA3, "M", "酉"), - (0x2FA4, "M", "釆"), - (0x2FA5, "M", "里"), - (0x2FA6, "M", "金"), - (0x2FA7, "M", "長"), - (0x2FA8, "M", "門"), - (0x2FA9, "M", "阜"), - (0x2FAA, "M", "隶"), - (0x2FAB, "M", "隹"), - (0x2FAC, "M", "雨"), - (0x2FAD, "M", "靑"), - (0x2FAE, "M", "非"), - (0x2FAF, "M", "面"), - (0x2FB0, "M", "革"), - (0x2FB1, "M", "韋"), - (0x2FB2, "M", "韭"), - (0x2FB3, "M", "音"), - (0x2FB4, "M", "頁"), - (0x2FB5, "M", "風"), - (0x2FB6, "M", "飛"), - (0x2FB7, "M", "食"), - (0x2FB8, "M", "首"), - (0x2FB9, "M", "香"), - (0x2FBA, "M", "馬"), - (0x2FBB, "M", "骨"), - (0x2FBC, "M", "高"), - (0x2FBD, "M", "髟"), - (0x2FBE, "M", "鬥"), - (0x2FBF, "M", "鬯"), - (0x2FC0, "M", "鬲"), - (0x2FC1, "M", "鬼"), - (0x2FC2, "M", "魚"), - (0x2FC3, "M", "鳥"), - (0x2FC4, "M", "鹵"), - (0x2FC5, "M", "鹿"), - (0x2FC6, "M", "麥"), - (0x2FC7, "M", "麻"), - (0x2FC8, "M", "黃"), - (0x2FC9, "M", "黍"), - (0x2FCA, "M", "黑"), - (0x2FCB, "M", "黹"), - (0x2FCC, "M", "黽"), - (0x2FCD, "M", "鼎"), - (0x2FCE, "M", "鼓"), - 
(0x2FCF, "M", "鼠"), - (0x2FD0, "M", "鼻"), - (0x2FD1, "M", "齊"), - (0x2FD2, "M", "齒"), - (0x2FD3, "M", "龍"), - (0x2FD4, "M", "龜"), - (0x2FD5, "M", "龠"), - (0x2FD6, "X"), - (0x3000, "3", " "), - (0x3001, "V"), - (0x3002, "M", "."), - (0x3003, "V"), - (0x3036, "M", "〒"), - (0x3037, "V"), - (0x3038, "M", "十"), - (0x3039, "M", "卄"), - (0x303A, "M", "卅"), - (0x303B, "V"), - (0x3040, "X"), - ] - - -def _seg_29() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3041, "V"), - (0x3097, "X"), - (0x3099, "V"), - (0x309B, "3", " ゙"), - (0x309C, "3", " ゚"), - (0x309D, "V"), - (0x309F, "M", "より"), - (0x30A0, "V"), - (0x30FF, "M", "コト"), - (0x3100, "X"), - (0x3105, "V"), - (0x3130, "X"), - (0x3131, "M", "ᄀ"), - (0x3132, "M", "ᄁ"), - (0x3133, "M", "ᆪ"), - (0x3134, "M", "ᄂ"), - (0x3135, "M", "ᆬ"), - (0x3136, "M", "ᆭ"), - (0x3137, "M", "ᄃ"), - (0x3138, "M", "ᄄ"), - (0x3139, "M", "ᄅ"), - (0x313A, "M", "ᆰ"), - (0x313B, "M", "ᆱ"), - (0x313C, "M", "ᆲ"), - (0x313D, "M", "ᆳ"), - (0x313E, "M", "ᆴ"), - (0x313F, "M", "ᆵ"), - (0x3140, "M", "ᄚ"), - (0x3141, "M", "ᄆ"), - (0x3142, "M", "ᄇ"), - (0x3143, "M", "ᄈ"), - (0x3144, "M", "ᄡ"), - (0x3145, "M", "ᄉ"), - (0x3146, "M", "ᄊ"), - (0x3147, "M", "ᄋ"), - (0x3148, "M", "ᄌ"), - (0x3149, "M", "ᄍ"), - (0x314A, "M", "ᄎ"), - (0x314B, "M", "ᄏ"), - (0x314C, "M", "ᄐ"), - (0x314D, "M", "ᄑ"), - (0x314E, "M", "ᄒ"), - (0x314F, "M", "ᅡ"), - (0x3150, "M", "ᅢ"), - (0x3151, "M", "ᅣ"), - (0x3152, "M", "ᅤ"), - (0x3153, "M", "ᅥ"), - (0x3154, "M", "ᅦ"), - (0x3155, "M", "ᅧ"), - (0x3156, "M", "ᅨ"), - (0x3157, "M", "ᅩ"), - (0x3158, "M", "ᅪ"), - (0x3159, "M", "ᅫ"), - (0x315A, "M", "ᅬ"), - (0x315B, "M", "ᅭ"), - (0x315C, "M", "ᅮ"), - (0x315D, "M", "ᅯ"), - (0x315E, "M", "ᅰ"), - (0x315F, "M", "ᅱ"), - (0x3160, "M", "ᅲ"), - (0x3161, "M", "ᅳ"), - (0x3162, "M", "ᅴ"), - (0x3163, "M", "ᅵ"), - (0x3164, "X"), - (0x3165, "M", "ᄔ"), - (0x3166, "M", "ᄕ"), - (0x3167, "M", "ᇇ"), - (0x3168, "M", "ᇈ"), - (0x3169, "M", "ᇌ"), - (0x316A, "M", "ᇎ"), - (0x316B, "M", "ᇓ"), - (0x316C, "M", "ᇗ"), - (0x316D, "M", "ᇙ"), - (0x316E, "M", "ᄜ"), - (0x316F, "M", "ᇝ"), - (0x3170, "M", "ᇟ"), - (0x3171, "M", "ᄝ"), - (0x3172, "M", "ᄞ"), - (0x3173, "M", "ᄠ"), - (0x3174, "M", "ᄢ"), - (0x3175, "M", "ᄣ"), - (0x3176, "M", "ᄧ"), - (0x3177, "M", "ᄩ"), - (0x3178, "M", "ᄫ"), - (0x3179, "M", "ᄬ"), - (0x317A, "M", "ᄭ"), - (0x317B, "M", "ᄮ"), - (0x317C, "M", "ᄯ"), - (0x317D, "M", "ᄲ"), - (0x317E, "M", "ᄶ"), - (0x317F, "M", "ᅀ"), - (0x3180, "M", "ᅇ"), - (0x3181, "M", "ᅌ"), - (0x3182, "M", "ᇱ"), - (0x3183, "M", "ᇲ"), - (0x3184, "M", "ᅗ"), - (0x3185, "M", "ᅘ"), - (0x3186, "M", "ᅙ"), - (0x3187, "M", "ᆄ"), - (0x3188, "M", "ᆅ"), - ] - - -def _seg_30() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3189, "M", "ᆈ"), - (0x318A, "M", "ᆑ"), - (0x318B, "M", "ᆒ"), - (0x318C, "M", "ᆔ"), - (0x318D, "M", "ᆞ"), - (0x318E, "M", "ᆡ"), - (0x318F, "X"), - (0x3190, "V"), - (0x3192, "M", "一"), - (0x3193, "M", "二"), - (0x3194, "M", "三"), - (0x3195, "M", "四"), - (0x3196, "M", "上"), - (0x3197, "M", "中"), - (0x3198, "M", "下"), - (0x3199, "M", "甲"), - (0x319A, "M", "乙"), - (0x319B, "M", "丙"), - (0x319C, "M", "丁"), - (0x319D, "M", "天"), - (0x319E, "M", "地"), - (0x319F, "M", "人"), - (0x31A0, "V"), - (0x31E4, "X"), - (0x31F0, "V"), - (0x3200, "3", "(ᄀ)"), - (0x3201, "3", "(ᄂ)"), - (0x3202, "3", "(ᄃ)"), - (0x3203, "3", "(ᄅ)"), - (0x3204, "3", "(ᄆ)"), - (0x3205, "3", "(ᄇ)"), - (0x3206, "3", "(ᄉ)"), - (0x3207, "3", "(ᄋ)"), - (0x3208, "3", "(ᄌ)"), - (0x3209, "3", "(ᄎ)"), - (0x320A, "3", "(ᄏ)"), - (0x320B, "3", "(ᄐ)"), - (0x320C, "3", 
"(ᄑ)"), - (0x320D, "3", "(ᄒ)"), - (0x320E, "3", "(가)"), - (0x320F, "3", "(나)"), - (0x3210, "3", "(다)"), - (0x3211, "3", "(라)"), - (0x3212, "3", "(마)"), - (0x3213, "3", "(바)"), - (0x3214, "3", "(사)"), - (0x3215, "3", "(아)"), - (0x3216, "3", "(자)"), - (0x3217, "3", "(차)"), - (0x3218, "3", "(카)"), - (0x3219, "3", "(타)"), - (0x321A, "3", "(파)"), - (0x321B, "3", "(하)"), - (0x321C, "3", "(주)"), - (0x321D, "3", "(오전)"), - (0x321E, "3", "(오후)"), - (0x321F, "X"), - (0x3220, "3", "(一)"), - (0x3221, "3", "(二)"), - (0x3222, "3", "(三)"), - (0x3223, "3", "(四)"), - (0x3224, "3", "(五)"), - (0x3225, "3", "(六)"), - (0x3226, "3", "(七)"), - (0x3227, "3", "(八)"), - (0x3228, "3", "(九)"), - (0x3229, "3", "(十)"), - (0x322A, "3", "(月)"), - (0x322B, "3", "(火)"), - (0x322C, "3", "(水)"), - (0x322D, "3", "(木)"), - (0x322E, "3", "(金)"), - (0x322F, "3", "(土)"), - (0x3230, "3", "(日)"), - (0x3231, "3", "(株)"), - (0x3232, "3", "(有)"), - (0x3233, "3", "(社)"), - (0x3234, "3", "(名)"), - (0x3235, "3", "(特)"), - (0x3236, "3", "(財)"), - (0x3237, "3", "(祝)"), - (0x3238, "3", "(労)"), - (0x3239, "3", "(代)"), - (0x323A, "3", "(呼)"), - (0x323B, "3", "(学)"), - (0x323C, "3", "(監)"), - (0x323D, "3", "(企)"), - (0x323E, "3", "(資)"), - (0x323F, "3", "(協)"), - (0x3240, "3", "(祭)"), - (0x3241, "3", "(休)"), - (0x3242, "3", "(自)"), - (0x3243, "3", "(至)"), - (0x3244, "M", "問"), - (0x3245, "M", "幼"), - (0x3246, "M", "文"), - (0x3247, "M", "箏"), - (0x3248, "V"), - (0x3250, "M", "pte"), - (0x3251, "M", "21"), - ] - - -def _seg_31() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x3252, "M", "22"), - (0x3253, "M", "23"), - (0x3254, "M", "24"), - (0x3255, "M", "25"), - (0x3256, "M", "26"), - (0x3257, "M", "27"), - (0x3258, "M", "28"), - (0x3259, "M", "29"), - (0x325A, "M", "30"), - (0x325B, "M", "31"), - (0x325C, "M", "32"), - (0x325D, "M", "33"), - (0x325E, "M", "34"), - (0x325F, "M", "35"), - (0x3260, "M", "ᄀ"), - (0x3261, "M", "ᄂ"), - (0x3262, "M", "ᄃ"), - (0x3263, "M", "ᄅ"), - (0x3264, "M", "ᄆ"), - (0x3265, "M", "ᄇ"), - (0x3266, "M", "ᄉ"), - (0x3267, "M", "ᄋ"), - (0x3268, "M", "ᄌ"), - (0x3269, "M", "ᄎ"), - (0x326A, "M", "ᄏ"), - (0x326B, "M", "ᄐ"), - (0x326C, "M", "ᄑ"), - (0x326D, "M", "ᄒ"), - (0x326E, "M", "가"), - (0x326F, "M", "나"), - (0x3270, "M", "다"), - (0x3271, "M", "라"), - (0x3272, "M", "마"), - (0x3273, "M", "바"), - (0x3274, "M", "사"), - (0x3275, "M", "아"), - (0x3276, "M", "자"), - (0x3277, "M", "차"), - (0x3278, "M", "카"), - (0x3279, "M", "타"), - (0x327A, "M", "파"), - (0x327B, "M", "하"), - (0x327C, "M", "참고"), - (0x327D, "M", "주의"), - (0x327E, "M", "우"), - (0x327F, "V"), - (0x3280, "M", "一"), - (0x3281, "M", "二"), - (0x3282, "M", "三"), - (0x3283, "M", "四"), - (0x3284, "M", "五"), - (0x3285, "M", "六"), - (0x3286, "M", "七"), - (0x3287, "M", "八"), - (0x3288, "M", "九"), - (0x3289, "M", "十"), - (0x328A, "M", "月"), - (0x328B, "M", "火"), - (0x328C, "M", "水"), - (0x328D, "M", "木"), - (0x328E, "M", "金"), - (0x328F, "M", "土"), - (0x3290, "M", "日"), - (0x3291, "M", "株"), - (0x3292, "M", "有"), - (0x3293, "M", "社"), - (0x3294, "M", "名"), - (0x3295, "M", "特"), - (0x3296, "M", "財"), - (0x3297, "M", "祝"), - (0x3298, "M", "労"), - (0x3299, "M", "秘"), - (0x329A, "M", "男"), - (0x329B, "M", "女"), - (0x329C, "M", "適"), - (0x329D, "M", "優"), - (0x329E, "M", "印"), - (0x329F, "M", "注"), - (0x32A0, "M", "項"), - (0x32A1, "M", "休"), - (0x32A2, "M", "写"), - (0x32A3, "M", "正"), - (0x32A4, "M", "上"), - (0x32A5, "M", "中"), - (0x32A6, "M", "下"), - (0x32A7, "M", "左"), - (0x32A8, "M", "右"), - (0x32A9, "M", "医"), - (0x32AA, "M", "宗"), - (0x32AB, "M", "学"), 
- (0x32AC, "M", "監"), - (0x32AD, "M", "企"), - (0x32AE, "M", "資"), - (0x32AF, "M", "協"), - (0x32B0, "M", "夜"), - (0x32B1, "M", "36"), - (0x32B2, "M", "37"), - (0x32B3, "M", "38"), - (0x32B4, "M", "39"), - (0x32B5, "M", "40"), - ] - - -def _seg_32() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x32B6, "M", "41"), - (0x32B7, "M", "42"), - (0x32B8, "M", "43"), - (0x32B9, "M", "44"), - (0x32BA, "M", "45"), - (0x32BB, "M", "46"), - (0x32BC, "M", "47"), - (0x32BD, "M", "48"), - (0x32BE, "M", "49"), - (0x32BF, "M", "50"), - (0x32C0, "M", "1月"), - (0x32C1, "M", "2月"), - (0x32C2, "M", "3月"), - (0x32C3, "M", "4月"), - (0x32C4, "M", "5月"), - (0x32C5, "M", "6月"), - (0x32C6, "M", "7月"), - (0x32C7, "M", "8月"), - (0x32C8, "M", "9月"), - (0x32C9, "M", "10月"), - (0x32CA, "M", "11月"), - (0x32CB, "M", "12月"), - (0x32CC, "M", "hg"), - (0x32CD, "M", "erg"), - (0x32CE, "M", "ev"), - (0x32CF, "M", "ltd"), - (0x32D0, "M", "ア"), - (0x32D1, "M", "イ"), - (0x32D2, "M", "ウ"), - (0x32D3, "M", "エ"), - (0x32D4, "M", "オ"), - (0x32D5, "M", "カ"), - (0x32D6, "M", "キ"), - (0x32D7, "M", "ク"), - (0x32D8, "M", "ケ"), - (0x32D9, "M", "コ"), - (0x32DA, "M", "サ"), - (0x32DB, "M", "シ"), - (0x32DC, "M", "ス"), - (0x32DD, "M", "セ"), - (0x32DE, "M", "ソ"), - (0x32DF, "M", "タ"), - (0x32E0, "M", "チ"), - (0x32E1, "M", "ツ"), - (0x32E2, "M", "テ"), - (0x32E3, "M", "ト"), - (0x32E4, "M", "ナ"), - (0x32E5, "M", "ニ"), - (0x32E6, "M", "ヌ"), - (0x32E7, "M", "ネ"), - (0x32E8, "M", "ノ"), - (0x32E9, "M", "ハ"), - (0x32EA, "M", "ヒ"), - (0x32EB, "M", "フ"), - (0x32EC, "M", "ヘ"), - (0x32ED, "M", "ホ"), - (0x32EE, "M", "マ"), - (0x32EF, "M", "ミ"), - (0x32F0, "M", "ム"), - (0x32F1, "M", "メ"), - (0x32F2, "M", "モ"), - (0x32F3, "M", "ヤ"), - (0x32F4, "M", "ユ"), - (0x32F5, "M", "ヨ"), - (0x32F6, "M", "ラ"), - (0x32F7, "M", "リ"), - (0x32F8, "M", "ル"), - (0x32F9, "M", "レ"), - (0x32FA, "M", "ロ"), - (0x32FB, "M", "ワ"), - (0x32FC, "M", "ヰ"), - (0x32FD, "M", "ヱ"), - (0x32FE, "M", "ヲ"), - (0x32FF, "M", "令和"), - (0x3300, "M", "アパート"), - (0x3301, "M", "アルファ"), - (0x3302, "M", "アンペア"), - (0x3303, "M", "アール"), - (0x3304, "M", "イニング"), - (0x3305, "M", "インチ"), - (0x3306, "M", "ウォン"), - (0x3307, "M", "エスクード"), - (0x3308, "M", "エーカー"), - (0x3309, "M", "オンス"), - (0x330A, "M", "オーム"), - (0x330B, "M", "カイリ"), - (0x330C, "M", "カラット"), - (0x330D, "M", "カロリー"), - (0x330E, "M", "ガロン"), - (0x330F, "M", "ガンマ"), - (0x3310, "M", "ギガ"), - (0x3311, "M", "ギニー"), - (0x3312, "M", "キュリー"), - (0x3313, "M", "ギルダー"), - (0x3314, "M", "キロ"), - (0x3315, "M", "キログラム"), - (0x3316, "M", "キロメートル"), - (0x3317, "M", "キロワット"), - (0x3318, "M", "グラム"), - (0x3319, "M", "グラムトン"), - ] - - -def _seg_33() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x331A, "M", "クルゼイロ"), - (0x331B, "M", "クローネ"), - (0x331C, "M", "ケース"), - (0x331D, "M", "コルナ"), - (0x331E, "M", "コーポ"), - (0x331F, "M", "サイクル"), - (0x3320, "M", "サンチーム"), - (0x3321, "M", "シリング"), - (0x3322, "M", "センチ"), - (0x3323, "M", "セント"), - (0x3324, "M", "ダース"), - (0x3325, "M", "デシ"), - (0x3326, "M", "ドル"), - (0x3327, "M", "トン"), - (0x3328, "M", "ナノ"), - (0x3329, "M", "ノット"), - (0x332A, "M", "ハイツ"), - (0x332B, "M", "パーセント"), - (0x332C, "M", "パーツ"), - (0x332D, "M", "バーレル"), - (0x332E, "M", "ピアストル"), - (0x332F, "M", "ピクル"), - (0x3330, "M", "ピコ"), - (0x3331, "M", "ビル"), - (0x3332, "M", "ファラッド"), - (0x3333, "M", "フィート"), - (0x3334, "M", "ブッシェル"), - (0x3335, "M", "フラン"), - (0x3336, "M", "ヘクタール"), - (0x3337, "M", "ペソ"), - (0x3338, "M", "ペニヒ"), - (0x3339, "M", "ヘルツ"), - (0x333A, "M", "ペンス"), - (0x333B, "M", "ページ"), - (0x333C, "M", 
"ベータ"), - (0x333D, "M", "ポイント"), - (0x333E, "M", "ボルト"), - (0x333F, "M", "ホン"), - (0x3340, "M", "ポンド"), - (0x3341, "M", "ホール"), - (0x3342, "M", "ホーン"), - (0x3343, "M", "マイクロ"), - (0x3344, "M", "マイル"), - (0x3345, "M", "マッハ"), - (0x3346, "M", "マルク"), - (0x3347, "M", "マンション"), - (0x3348, "M", "ミクロン"), - (0x3349, "M", "ミリ"), - (0x334A, "M", "ミリバール"), - (0x334B, "M", "メガ"), - (0x334C, "M", "メガトン"), - (0x334D, "M", "メートル"), - (0x334E, "M", "ヤード"), - (0x334F, "M", "ヤール"), - (0x3350, "M", "ユアン"), - (0x3351, "M", "リットル"), - (0x3352, "M", "リラ"), - (0x3353, "M", "ルピー"), - (0x3354, "M", "ルーブル"), - (0x3355, "M", "レム"), - (0x3356, "M", "レントゲン"), - (0x3357, "M", "ワット"), - (0x3358, "M", "0点"), - (0x3359, "M", "1点"), - (0x335A, "M", "2点"), - (0x335B, "M", "3点"), - (0x335C, "M", "4点"), - (0x335D, "M", "5点"), - (0x335E, "M", "6点"), - (0x335F, "M", "7点"), - (0x3360, "M", "8点"), - (0x3361, "M", "9点"), - (0x3362, "M", "10点"), - (0x3363, "M", "11点"), - (0x3364, "M", "12点"), - (0x3365, "M", "13点"), - (0x3366, "M", "14点"), - (0x3367, "M", "15点"), - (0x3368, "M", "16点"), - (0x3369, "M", "17点"), - (0x336A, "M", "18点"), - (0x336B, "M", "19点"), - (0x336C, "M", "20点"), - (0x336D, "M", "21点"), - (0x336E, "M", "22点"), - (0x336F, "M", "23点"), - (0x3370, "M", "24点"), - (0x3371, "M", "hpa"), - (0x3372, "M", "da"), - (0x3373, "M", "au"), - (0x3374, "M", "bar"), - (0x3375, "M", "ov"), - (0x3376, "M", "pc"), - (0x3377, "M", "dm"), - (0x3378, "M", "dm2"), - (0x3379, "M", "dm3"), - (0x337A, "M", "iu"), - (0x337B, "M", "平成"), - (0x337C, "M", "昭和"), - (0x337D, "M", "大正"), - ] - - -def _seg_34() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x337E, "M", "明治"), - (0x337F, "M", "株式会社"), - (0x3380, "M", "pa"), - (0x3381, "M", "na"), - (0x3382, "M", "μa"), - (0x3383, "M", "ma"), - (0x3384, "M", "ka"), - (0x3385, "M", "kb"), - (0x3386, "M", "mb"), - (0x3387, "M", "gb"), - (0x3388, "M", "cal"), - (0x3389, "M", "kcal"), - (0x338A, "M", "pf"), - (0x338B, "M", "nf"), - (0x338C, "M", "μf"), - (0x338D, "M", "μg"), - (0x338E, "M", "mg"), - (0x338F, "M", "kg"), - (0x3390, "M", "hz"), - (0x3391, "M", "khz"), - (0x3392, "M", "mhz"), - (0x3393, "M", "ghz"), - (0x3394, "M", "thz"), - (0x3395, "M", "μl"), - (0x3396, "M", "ml"), - (0x3397, "M", "dl"), - (0x3398, "M", "kl"), - (0x3399, "M", "fm"), - (0x339A, "M", "nm"), - (0x339B, "M", "μm"), - (0x339C, "M", "mm"), - (0x339D, "M", "cm"), - (0x339E, "M", "km"), - (0x339F, "M", "mm2"), - (0x33A0, "M", "cm2"), - (0x33A1, "M", "m2"), - (0x33A2, "M", "km2"), - (0x33A3, "M", "mm3"), - (0x33A4, "M", "cm3"), - (0x33A5, "M", "m3"), - (0x33A6, "M", "km3"), - (0x33A7, "M", "m∕s"), - (0x33A8, "M", "m∕s2"), - (0x33A9, "M", "pa"), - (0x33AA, "M", "kpa"), - (0x33AB, "M", "mpa"), - (0x33AC, "M", "gpa"), - (0x33AD, "M", "rad"), - (0x33AE, "M", "rad∕s"), - (0x33AF, "M", "rad∕s2"), - (0x33B0, "M", "ps"), - (0x33B1, "M", "ns"), - (0x33B2, "M", "μs"), - (0x33B3, "M", "ms"), - (0x33B4, "M", "pv"), - (0x33B5, "M", "nv"), - (0x33B6, "M", "μv"), - (0x33B7, "M", "mv"), - (0x33B8, "M", "kv"), - (0x33B9, "M", "mv"), - (0x33BA, "M", "pw"), - (0x33BB, "M", "nw"), - (0x33BC, "M", "μw"), - (0x33BD, "M", "mw"), - (0x33BE, "M", "kw"), - (0x33BF, "M", "mw"), - (0x33C0, "M", "kω"), - (0x33C1, "M", "mω"), - (0x33C2, "X"), - (0x33C3, "M", "bq"), - (0x33C4, "M", "cc"), - (0x33C5, "M", "cd"), - (0x33C6, "M", "c∕kg"), - (0x33C7, "X"), - (0x33C8, "M", "db"), - (0x33C9, "M", "gy"), - (0x33CA, "M", "ha"), - (0x33CB, "M", "hp"), - (0x33CC, "M", "in"), - (0x33CD, "M", "kk"), - (0x33CE, "M", "km"), - (0x33CF, "M", "kt"), - 
(0x33D0, "M", "lm"), - (0x33D1, "M", "ln"), - (0x33D2, "M", "log"), - (0x33D3, "M", "lx"), - (0x33D4, "M", "mb"), - (0x33D5, "M", "mil"), - (0x33D6, "M", "mol"), - (0x33D7, "M", "ph"), - (0x33D8, "X"), - (0x33D9, "M", "ppm"), - (0x33DA, "M", "pr"), - (0x33DB, "M", "sr"), - (0x33DC, "M", "sv"), - (0x33DD, "M", "wb"), - (0x33DE, "M", "v∕m"), - (0x33DF, "M", "a∕m"), - (0x33E0, "M", "1日"), - (0x33E1, "M", "2日"), - ] - - -def _seg_35() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x33E2, "M", "3日"), - (0x33E3, "M", "4日"), - (0x33E4, "M", "5日"), - (0x33E5, "M", "6日"), - (0x33E6, "M", "7日"), - (0x33E7, "M", "8日"), - (0x33E8, "M", "9日"), - (0x33E9, "M", "10日"), - (0x33EA, "M", "11日"), - (0x33EB, "M", "12日"), - (0x33EC, "M", "13日"), - (0x33ED, "M", "14日"), - (0x33EE, "M", "15日"), - (0x33EF, "M", "16日"), - (0x33F0, "M", "17日"), - (0x33F1, "M", "18日"), - (0x33F2, "M", "19日"), - (0x33F3, "M", "20日"), - (0x33F4, "M", "21日"), - (0x33F5, "M", "22日"), - (0x33F6, "M", "23日"), - (0x33F7, "M", "24日"), - (0x33F8, "M", "25日"), - (0x33F9, "M", "26日"), - (0x33FA, "M", "27日"), - (0x33FB, "M", "28日"), - (0x33FC, "M", "29日"), - (0x33FD, "M", "30日"), - (0x33FE, "M", "31日"), - (0x33FF, "M", "gal"), - (0x3400, "V"), - (0xA48D, "X"), - (0xA490, "V"), - (0xA4C7, "X"), - (0xA4D0, "V"), - (0xA62C, "X"), - (0xA640, "M", "ꙁ"), - (0xA641, "V"), - (0xA642, "M", "ꙃ"), - (0xA643, "V"), - (0xA644, "M", "ꙅ"), - (0xA645, "V"), - (0xA646, "M", "ꙇ"), - (0xA647, "V"), - (0xA648, "M", "ꙉ"), - (0xA649, "V"), - (0xA64A, "M", "ꙋ"), - (0xA64B, "V"), - (0xA64C, "M", "ꙍ"), - (0xA64D, "V"), - (0xA64E, "M", "ꙏ"), - (0xA64F, "V"), - (0xA650, "M", "ꙑ"), - (0xA651, "V"), - (0xA652, "M", "ꙓ"), - (0xA653, "V"), - (0xA654, "M", "ꙕ"), - (0xA655, "V"), - (0xA656, "M", "ꙗ"), - (0xA657, "V"), - (0xA658, "M", "ꙙ"), - (0xA659, "V"), - (0xA65A, "M", "ꙛ"), - (0xA65B, "V"), - (0xA65C, "M", "ꙝ"), - (0xA65D, "V"), - (0xA65E, "M", "ꙟ"), - (0xA65F, "V"), - (0xA660, "M", "ꙡ"), - (0xA661, "V"), - (0xA662, "M", "ꙣ"), - (0xA663, "V"), - (0xA664, "M", "ꙥ"), - (0xA665, "V"), - (0xA666, "M", "ꙧ"), - (0xA667, "V"), - (0xA668, "M", "ꙩ"), - (0xA669, "V"), - (0xA66A, "M", "ꙫ"), - (0xA66B, "V"), - (0xA66C, "M", "ꙭ"), - (0xA66D, "V"), - (0xA680, "M", "ꚁ"), - (0xA681, "V"), - (0xA682, "M", "ꚃ"), - (0xA683, "V"), - (0xA684, "M", "ꚅ"), - (0xA685, "V"), - (0xA686, "M", "ꚇ"), - (0xA687, "V"), - (0xA688, "M", "ꚉ"), - (0xA689, "V"), - (0xA68A, "M", "ꚋ"), - (0xA68B, "V"), - (0xA68C, "M", "ꚍ"), - (0xA68D, "V"), - (0xA68E, "M", "ꚏ"), - (0xA68F, "V"), - (0xA690, "M", "ꚑ"), - (0xA691, "V"), - ] - - -def _seg_36() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA692, "M", "ꚓ"), - (0xA693, "V"), - (0xA694, "M", "ꚕ"), - (0xA695, "V"), - (0xA696, "M", "ꚗ"), - (0xA697, "V"), - (0xA698, "M", "ꚙ"), - (0xA699, "V"), - (0xA69A, "M", "ꚛ"), - (0xA69B, "V"), - (0xA69C, "M", "ъ"), - (0xA69D, "M", "ь"), - (0xA69E, "V"), - (0xA6F8, "X"), - (0xA700, "V"), - (0xA722, "M", "ꜣ"), - (0xA723, "V"), - (0xA724, "M", "ꜥ"), - (0xA725, "V"), - (0xA726, "M", "ꜧ"), - (0xA727, "V"), - (0xA728, "M", "ꜩ"), - (0xA729, "V"), - (0xA72A, "M", "ꜫ"), - (0xA72B, "V"), - (0xA72C, "M", "ꜭ"), - (0xA72D, "V"), - (0xA72E, "M", "ꜯ"), - (0xA72F, "V"), - (0xA732, "M", "ꜳ"), - (0xA733, "V"), - (0xA734, "M", "ꜵ"), - (0xA735, "V"), - (0xA736, "M", "ꜷ"), - (0xA737, "V"), - (0xA738, "M", "ꜹ"), - (0xA739, "V"), - (0xA73A, "M", "ꜻ"), - (0xA73B, "V"), - (0xA73C, "M", "ꜽ"), - (0xA73D, "V"), - (0xA73E, "M", "ꜿ"), - (0xA73F, "V"), - (0xA740, "M", "ꝁ"), - (0xA741, "V"), - (0xA742, "M", "ꝃ"), - 
(0xA743, "V"), - (0xA744, "M", "ꝅ"), - (0xA745, "V"), - (0xA746, "M", "ꝇ"), - (0xA747, "V"), - (0xA748, "M", "ꝉ"), - (0xA749, "V"), - (0xA74A, "M", "ꝋ"), - (0xA74B, "V"), - (0xA74C, "M", "ꝍ"), - (0xA74D, "V"), - (0xA74E, "M", "ꝏ"), - (0xA74F, "V"), - (0xA750, "M", "ꝑ"), - (0xA751, "V"), - (0xA752, "M", "ꝓ"), - (0xA753, "V"), - (0xA754, "M", "ꝕ"), - (0xA755, "V"), - (0xA756, "M", "ꝗ"), - (0xA757, "V"), - (0xA758, "M", "ꝙ"), - (0xA759, "V"), - (0xA75A, "M", "ꝛ"), - (0xA75B, "V"), - (0xA75C, "M", "ꝝ"), - (0xA75D, "V"), - (0xA75E, "M", "ꝟ"), - (0xA75F, "V"), - (0xA760, "M", "ꝡ"), - (0xA761, "V"), - (0xA762, "M", "ꝣ"), - (0xA763, "V"), - (0xA764, "M", "ꝥ"), - (0xA765, "V"), - (0xA766, "M", "ꝧ"), - (0xA767, "V"), - (0xA768, "M", "ꝩ"), - (0xA769, "V"), - (0xA76A, "M", "ꝫ"), - (0xA76B, "V"), - (0xA76C, "M", "ꝭ"), - (0xA76D, "V"), - (0xA76E, "M", "ꝯ"), - (0xA76F, "V"), - (0xA770, "M", "ꝯ"), - (0xA771, "V"), - (0xA779, "M", "ꝺ"), - (0xA77A, "V"), - (0xA77B, "M", "ꝼ"), - (0xA77C, "V"), - (0xA77D, "M", "ᵹ"), - (0xA77E, "M", "ꝿ"), - (0xA77F, "V"), - ] - - -def _seg_37() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA780, "M", "ꞁ"), - (0xA781, "V"), - (0xA782, "M", "ꞃ"), - (0xA783, "V"), - (0xA784, "M", "ꞅ"), - (0xA785, "V"), - (0xA786, "M", "ꞇ"), - (0xA787, "V"), - (0xA78B, "M", "ꞌ"), - (0xA78C, "V"), - (0xA78D, "M", "ɥ"), - (0xA78E, "V"), - (0xA790, "M", "ꞑ"), - (0xA791, "V"), - (0xA792, "M", "ꞓ"), - (0xA793, "V"), - (0xA796, "M", "ꞗ"), - (0xA797, "V"), - (0xA798, "M", "ꞙ"), - (0xA799, "V"), - (0xA79A, "M", "ꞛ"), - (0xA79B, "V"), - (0xA79C, "M", "ꞝ"), - (0xA79D, "V"), - (0xA79E, "M", "ꞟ"), - (0xA79F, "V"), - (0xA7A0, "M", "ꞡ"), - (0xA7A1, "V"), - (0xA7A2, "M", "ꞣ"), - (0xA7A3, "V"), - (0xA7A4, "M", "ꞥ"), - (0xA7A5, "V"), - (0xA7A6, "M", "ꞧ"), - (0xA7A7, "V"), - (0xA7A8, "M", "ꞩ"), - (0xA7A9, "V"), - (0xA7AA, "M", "ɦ"), - (0xA7AB, "M", "ɜ"), - (0xA7AC, "M", "ɡ"), - (0xA7AD, "M", "ɬ"), - (0xA7AE, "M", "ɪ"), - (0xA7AF, "V"), - (0xA7B0, "M", "ʞ"), - (0xA7B1, "M", "ʇ"), - (0xA7B2, "M", "ʝ"), - (0xA7B3, "M", "ꭓ"), - (0xA7B4, "M", "ꞵ"), - (0xA7B5, "V"), - (0xA7B6, "M", "ꞷ"), - (0xA7B7, "V"), - (0xA7B8, "M", "ꞹ"), - (0xA7B9, "V"), - (0xA7BA, "M", "ꞻ"), - (0xA7BB, "V"), - (0xA7BC, "M", "ꞽ"), - (0xA7BD, "V"), - (0xA7BE, "M", "ꞿ"), - (0xA7BF, "V"), - (0xA7C0, "M", "ꟁ"), - (0xA7C1, "V"), - (0xA7C2, "M", "ꟃ"), - (0xA7C3, "V"), - (0xA7C4, "M", "ꞔ"), - (0xA7C5, "M", "ʂ"), - (0xA7C6, "M", "ᶎ"), - (0xA7C7, "M", "ꟈ"), - (0xA7C8, "V"), - (0xA7C9, "M", "ꟊ"), - (0xA7CA, "V"), - (0xA7CB, "X"), - (0xA7D0, "M", "ꟑ"), - (0xA7D1, "V"), - (0xA7D2, "X"), - (0xA7D3, "V"), - (0xA7D4, "X"), - (0xA7D5, "V"), - (0xA7D6, "M", "ꟗ"), - (0xA7D7, "V"), - (0xA7D8, "M", "ꟙ"), - (0xA7D9, "V"), - (0xA7DA, "X"), - (0xA7F2, "M", "c"), - (0xA7F3, "M", "f"), - (0xA7F4, "M", "q"), - (0xA7F5, "M", "ꟶ"), - (0xA7F6, "V"), - (0xA7F8, "M", "ħ"), - (0xA7F9, "M", "œ"), - (0xA7FA, "V"), - (0xA82D, "X"), - (0xA830, "V"), - (0xA83A, "X"), - (0xA840, "V"), - (0xA878, "X"), - (0xA880, "V"), - (0xA8C6, "X"), - (0xA8CE, "V"), - (0xA8DA, "X"), - (0xA8E0, "V"), - (0xA954, "X"), - ] - - -def _seg_38() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xA95F, "V"), - (0xA97D, "X"), - (0xA980, "V"), - (0xA9CE, "X"), - (0xA9CF, "V"), - (0xA9DA, "X"), - (0xA9DE, "V"), - (0xA9FF, "X"), - (0xAA00, "V"), - (0xAA37, "X"), - (0xAA40, "V"), - (0xAA4E, "X"), - (0xAA50, "V"), - (0xAA5A, "X"), - (0xAA5C, "V"), - (0xAAC3, "X"), - (0xAADB, "V"), - (0xAAF7, "X"), - (0xAB01, "V"), - (0xAB07, "X"), - (0xAB09, "V"), - (0xAB0F, "X"), - 
(0xAB11, "V"), - (0xAB17, "X"), - (0xAB20, "V"), - (0xAB27, "X"), - (0xAB28, "V"), - (0xAB2F, "X"), - (0xAB30, "V"), - (0xAB5C, "M", "ꜧ"), - (0xAB5D, "M", "ꬷ"), - (0xAB5E, "M", "ɫ"), - (0xAB5F, "M", "ꭒ"), - (0xAB60, "V"), - (0xAB69, "M", "ʍ"), - (0xAB6A, "V"), - (0xAB6C, "X"), - (0xAB70, "M", "Ꭰ"), - (0xAB71, "M", "Ꭱ"), - (0xAB72, "M", "Ꭲ"), - (0xAB73, "M", "Ꭳ"), - (0xAB74, "M", "Ꭴ"), - (0xAB75, "M", "Ꭵ"), - (0xAB76, "M", "Ꭶ"), - (0xAB77, "M", "Ꭷ"), - (0xAB78, "M", "Ꭸ"), - (0xAB79, "M", "Ꭹ"), - (0xAB7A, "M", "Ꭺ"), - (0xAB7B, "M", "Ꭻ"), - (0xAB7C, "M", "Ꭼ"), - (0xAB7D, "M", "Ꭽ"), - (0xAB7E, "M", "Ꭾ"), - (0xAB7F, "M", "Ꭿ"), - (0xAB80, "M", "Ꮀ"), - (0xAB81, "M", "Ꮁ"), - (0xAB82, "M", "Ꮂ"), - (0xAB83, "M", "Ꮃ"), - (0xAB84, "M", "Ꮄ"), - (0xAB85, "M", "Ꮅ"), - (0xAB86, "M", "Ꮆ"), - (0xAB87, "M", "Ꮇ"), - (0xAB88, "M", "Ꮈ"), - (0xAB89, "M", "Ꮉ"), - (0xAB8A, "M", "Ꮊ"), - (0xAB8B, "M", "Ꮋ"), - (0xAB8C, "M", "Ꮌ"), - (0xAB8D, "M", "Ꮍ"), - (0xAB8E, "M", "Ꮎ"), - (0xAB8F, "M", "Ꮏ"), - (0xAB90, "M", "Ꮐ"), - (0xAB91, "M", "Ꮑ"), - (0xAB92, "M", "Ꮒ"), - (0xAB93, "M", "Ꮓ"), - (0xAB94, "M", "Ꮔ"), - (0xAB95, "M", "Ꮕ"), - (0xAB96, "M", "Ꮖ"), - (0xAB97, "M", "Ꮗ"), - (0xAB98, "M", "Ꮘ"), - (0xAB99, "M", "Ꮙ"), - (0xAB9A, "M", "Ꮚ"), - (0xAB9B, "M", "Ꮛ"), - (0xAB9C, "M", "Ꮜ"), - (0xAB9D, "M", "Ꮝ"), - (0xAB9E, "M", "Ꮞ"), - (0xAB9F, "M", "Ꮟ"), - (0xABA0, "M", "Ꮠ"), - (0xABA1, "M", "Ꮡ"), - (0xABA2, "M", "Ꮢ"), - (0xABA3, "M", "Ꮣ"), - (0xABA4, "M", "Ꮤ"), - (0xABA5, "M", "Ꮥ"), - (0xABA6, "M", "Ꮦ"), - (0xABA7, "M", "Ꮧ"), - (0xABA8, "M", "Ꮨ"), - (0xABA9, "M", "Ꮩ"), - (0xABAA, "M", "Ꮪ"), - (0xABAB, "M", "Ꮫ"), - (0xABAC, "M", "Ꮬ"), - (0xABAD, "M", "Ꮭ"), - (0xABAE, "M", "Ꮮ"), - ] - - -def _seg_39() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xABAF, "M", "Ꮯ"), - (0xABB0, "M", "Ꮰ"), - (0xABB1, "M", "Ꮱ"), - (0xABB2, "M", "Ꮲ"), - (0xABB3, "M", "Ꮳ"), - (0xABB4, "M", "Ꮴ"), - (0xABB5, "M", "Ꮵ"), - (0xABB6, "M", "Ꮶ"), - (0xABB7, "M", "Ꮷ"), - (0xABB8, "M", "Ꮸ"), - (0xABB9, "M", "Ꮹ"), - (0xABBA, "M", "Ꮺ"), - (0xABBB, "M", "Ꮻ"), - (0xABBC, "M", "Ꮼ"), - (0xABBD, "M", "Ꮽ"), - (0xABBE, "M", "Ꮾ"), - (0xABBF, "M", "Ꮿ"), - (0xABC0, "V"), - (0xABEE, "X"), - (0xABF0, "V"), - (0xABFA, "X"), - (0xAC00, "V"), - (0xD7A4, "X"), - (0xD7B0, "V"), - (0xD7C7, "X"), - (0xD7CB, "V"), - (0xD7FC, "X"), - (0xF900, "M", "豈"), - (0xF901, "M", "更"), - (0xF902, "M", "車"), - (0xF903, "M", "賈"), - (0xF904, "M", "滑"), - (0xF905, "M", "串"), - (0xF906, "M", "句"), - (0xF907, "M", "龜"), - (0xF909, "M", "契"), - (0xF90A, "M", "金"), - (0xF90B, "M", "喇"), - (0xF90C, "M", "奈"), - (0xF90D, "M", "懶"), - (0xF90E, "M", "癩"), - (0xF90F, "M", "羅"), - (0xF910, "M", "蘿"), - (0xF911, "M", "螺"), - (0xF912, "M", "裸"), - (0xF913, "M", "邏"), - (0xF914, "M", "樂"), - (0xF915, "M", "洛"), - (0xF916, "M", "烙"), - (0xF917, "M", "珞"), - (0xF918, "M", "落"), - (0xF919, "M", "酪"), - (0xF91A, "M", "駱"), - (0xF91B, "M", "亂"), - (0xF91C, "M", "卵"), - (0xF91D, "M", "欄"), - (0xF91E, "M", "爛"), - (0xF91F, "M", "蘭"), - (0xF920, "M", "鸞"), - (0xF921, "M", "嵐"), - (0xF922, "M", "濫"), - (0xF923, "M", "藍"), - (0xF924, "M", "襤"), - (0xF925, "M", "拉"), - (0xF926, "M", "臘"), - (0xF927, "M", "蠟"), - (0xF928, "M", "廊"), - (0xF929, "M", "朗"), - (0xF92A, "M", "浪"), - (0xF92B, "M", "狼"), - (0xF92C, "M", "郎"), - (0xF92D, "M", "來"), - (0xF92E, "M", "冷"), - (0xF92F, "M", "勞"), - (0xF930, "M", "擄"), - (0xF931, "M", "櫓"), - (0xF932, "M", "爐"), - (0xF933, "M", "盧"), - (0xF934, "M", "老"), - (0xF935, "M", "蘆"), - (0xF936, "M", "虜"), - (0xF937, "M", "路"), - (0xF938, "M", "露"), - (0xF939, "M", "魯"), - 
(0xF93A, "M", "鷺"), - (0xF93B, "M", "碌"), - (0xF93C, "M", "祿"), - (0xF93D, "M", "綠"), - (0xF93E, "M", "菉"), - (0xF93F, "M", "錄"), - (0xF940, "M", "鹿"), - (0xF941, "M", "論"), - (0xF942, "M", "壟"), - (0xF943, "M", "弄"), - (0xF944, "M", "籠"), - (0xF945, "M", "聾"), - (0xF946, "M", "牢"), - (0xF947, "M", "磊"), - (0xF948, "M", "賂"), - (0xF949, "M", "雷"), - ] - - -def _seg_40() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF94A, "M", "壘"), - (0xF94B, "M", "屢"), - (0xF94C, "M", "樓"), - (0xF94D, "M", "淚"), - (0xF94E, "M", "漏"), - (0xF94F, "M", "累"), - (0xF950, "M", "縷"), - (0xF951, "M", "陋"), - (0xF952, "M", "勒"), - (0xF953, "M", "肋"), - (0xF954, "M", "凜"), - (0xF955, "M", "凌"), - (0xF956, "M", "稜"), - (0xF957, "M", "綾"), - (0xF958, "M", "菱"), - (0xF959, "M", "陵"), - (0xF95A, "M", "讀"), - (0xF95B, "M", "拏"), - (0xF95C, "M", "樂"), - (0xF95D, "M", "諾"), - (0xF95E, "M", "丹"), - (0xF95F, "M", "寧"), - (0xF960, "M", "怒"), - (0xF961, "M", "率"), - (0xF962, "M", "異"), - (0xF963, "M", "北"), - (0xF964, "M", "磻"), - (0xF965, "M", "便"), - (0xF966, "M", "復"), - (0xF967, "M", "不"), - (0xF968, "M", "泌"), - (0xF969, "M", "數"), - (0xF96A, "M", "索"), - (0xF96B, "M", "參"), - (0xF96C, "M", "塞"), - (0xF96D, "M", "省"), - (0xF96E, "M", "葉"), - (0xF96F, "M", "說"), - (0xF970, "M", "殺"), - (0xF971, "M", "辰"), - (0xF972, "M", "沈"), - (0xF973, "M", "拾"), - (0xF974, "M", "若"), - (0xF975, "M", "掠"), - (0xF976, "M", "略"), - (0xF977, "M", "亮"), - (0xF978, "M", "兩"), - (0xF979, "M", "凉"), - (0xF97A, "M", "梁"), - (0xF97B, "M", "糧"), - (0xF97C, "M", "良"), - (0xF97D, "M", "諒"), - (0xF97E, "M", "量"), - (0xF97F, "M", "勵"), - (0xF980, "M", "呂"), - (0xF981, "M", "女"), - (0xF982, "M", "廬"), - (0xF983, "M", "旅"), - (0xF984, "M", "濾"), - (0xF985, "M", "礪"), - (0xF986, "M", "閭"), - (0xF987, "M", "驪"), - (0xF988, "M", "麗"), - (0xF989, "M", "黎"), - (0xF98A, "M", "力"), - (0xF98B, "M", "曆"), - (0xF98C, "M", "歷"), - (0xF98D, "M", "轢"), - (0xF98E, "M", "年"), - (0xF98F, "M", "憐"), - (0xF990, "M", "戀"), - (0xF991, "M", "撚"), - (0xF992, "M", "漣"), - (0xF993, "M", "煉"), - (0xF994, "M", "璉"), - (0xF995, "M", "秊"), - (0xF996, "M", "練"), - (0xF997, "M", "聯"), - (0xF998, "M", "輦"), - (0xF999, "M", "蓮"), - (0xF99A, "M", "連"), - (0xF99B, "M", "鍊"), - (0xF99C, "M", "列"), - (0xF99D, "M", "劣"), - (0xF99E, "M", "咽"), - (0xF99F, "M", "烈"), - (0xF9A0, "M", "裂"), - (0xF9A1, "M", "說"), - (0xF9A2, "M", "廉"), - (0xF9A3, "M", "念"), - (0xF9A4, "M", "捻"), - (0xF9A5, "M", "殮"), - (0xF9A6, "M", "簾"), - (0xF9A7, "M", "獵"), - (0xF9A8, "M", "令"), - (0xF9A9, "M", "囹"), - (0xF9AA, "M", "寧"), - (0xF9AB, "M", "嶺"), - (0xF9AC, "M", "怜"), - (0xF9AD, "M", "玲"), - ] - - -def _seg_41() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xF9AE, "M", "瑩"), - (0xF9AF, "M", "羚"), - (0xF9B0, "M", "聆"), - (0xF9B1, "M", "鈴"), - (0xF9B2, "M", "零"), - (0xF9B3, "M", "靈"), - (0xF9B4, "M", "領"), - (0xF9B5, "M", "例"), - (0xF9B6, "M", "禮"), - (0xF9B7, "M", "醴"), - (0xF9B8, "M", "隸"), - (0xF9B9, "M", "惡"), - (0xF9BA, "M", "了"), - (0xF9BB, "M", "僚"), - (0xF9BC, "M", "寮"), - (0xF9BD, "M", "尿"), - (0xF9BE, "M", "料"), - (0xF9BF, "M", "樂"), - (0xF9C0, "M", "燎"), - (0xF9C1, "M", "療"), - (0xF9C2, "M", "蓼"), - (0xF9C3, "M", "遼"), - (0xF9C4, "M", "龍"), - (0xF9C5, "M", "暈"), - (0xF9C6, "M", "阮"), - (0xF9C7, "M", "劉"), - (0xF9C8, "M", "杻"), - (0xF9C9, "M", "柳"), - (0xF9CA, "M", "流"), - (0xF9CB, "M", "溜"), - (0xF9CC, "M", "琉"), - (0xF9CD, "M", "留"), - (0xF9CE, "M", "硫"), - (0xF9CF, "M", "紐"), - (0xF9D0, "M", "類"), - (0xF9D1, "M", "六"), - (0xF9D2, "M", "戮"), - (0xF9D3, 
"M", "陸"), - (0xF9D4, "M", "倫"), - (0xF9D5, "M", "崙"), - (0xF9D6, "M", "淪"), - (0xF9D7, "M", "輪"), - (0xF9D8, "M", "律"), - (0xF9D9, "M", "慄"), - (0xF9DA, "M", "栗"), - (0xF9DB, "M", "率"), - (0xF9DC, "M", "隆"), - (0xF9DD, "M", "利"), - (0xF9DE, "M", "吏"), - (0xF9DF, "M", "履"), - (0xF9E0, "M", "易"), - (0xF9E1, "M", "李"), - (0xF9E2, "M", "梨"), - (0xF9E3, "M", "泥"), - (0xF9E4, "M", "理"), - (0xF9E5, "M", "痢"), - (0xF9E6, "M", "罹"), - (0xF9E7, "M", "裏"), - (0xF9E8, "M", "裡"), - (0xF9E9, "M", "里"), - (0xF9EA, "M", "離"), - (0xF9EB, "M", "匿"), - (0xF9EC, "M", "溺"), - (0xF9ED, "M", "吝"), - (0xF9EE, "M", "燐"), - (0xF9EF, "M", "璘"), - (0xF9F0, "M", "藺"), - (0xF9F1, "M", "隣"), - (0xF9F2, "M", "鱗"), - (0xF9F3, "M", "麟"), - (0xF9F4, "M", "林"), - (0xF9F5, "M", "淋"), - (0xF9F6, "M", "臨"), - (0xF9F7, "M", "立"), - (0xF9F8, "M", "笠"), - (0xF9F9, "M", "粒"), - (0xF9FA, "M", "狀"), - (0xF9FB, "M", "炙"), - (0xF9FC, "M", "識"), - (0xF9FD, "M", "什"), - (0xF9FE, "M", "茶"), - (0xF9FF, "M", "刺"), - (0xFA00, "M", "切"), - (0xFA01, "M", "度"), - (0xFA02, "M", "拓"), - (0xFA03, "M", "糖"), - (0xFA04, "M", "宅"), - (0xFA05, "M", "洞"), - (0xFA06, "M", "暴"), - (0xFA07, "M", "輻"), - (0xFA08, "M", "行"), - (0xFA09, "M", "降"), - (0xFA0A, "M", "見"), - (0xFA0B, "M", "廓"), - (0xFA0C, "M", "兀"), - (0xFA0D, "M", "嗀"), - (0xFA0E, "V"), - (0xFA10, "M", "塚"), - (0xFA11, "V"), - (0xFA12, "M", "晴"), - ] - - -def _seg_42() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA13, "V"), - (0xFA15, "M", "凞"), - (0xFA16, "M", "猪"), - (0xFA17, "M", "益"), - (0xFA18, "M", "礼"), - (0xFA19, "M", "神"), - (0xFA1A, "M", "祥"), - (0xFA1B, "M", "福"), - (0xFA1C, "M", "靖"), - (0xFA1D, "M", "精"), - (0xFA1E, "M", "羽"), - (0xFA1F, "V"), - (0xFA20, "M", "蘒"), - (0xFA21, "V"), - (0xFA22, "M", "諸"), - (0xFA23, "V"), - (0xFA25, "M", "逸"), - (0xFA26, "M", "都"), - (0xFA27, "V"), - (0xFA2A, "M", "飯"), - (0xFA2B, "M", "飼"), - (0xFA2C, "M", "館"), - (0xFA2D, "M", "鶴"), - (0xFA2E, "M", "郞"), - (0xFA2F, "M", "隷"), - (0xFA30, "M", "侮"), - (0xFA31, "M", "僧"), - (0xFA32, "M", "免"), - (0xFA33, "M", "勉"), - (0xFA34, "M", "勤"), - (0xFA35, "M", "卑"), - (0xFA36, "M", "喝"), - (0xFA37, "M", "嘆"), - (0xFA38, "M", "器"), - (0xFA39, "M", "塀"), - (0xFA3A, "M", "墨"), - (0xFA3B, "M", "層"), - (0xFA3C, "M", "屮"), - (0xFA3D, "M", "悔"), - (0xFA3E, "M", "慨"), - (0xFA3F, "M", "憎"), - (0xFA40, "M", "懲"), - (0xFA41, "M", "敏"), - (0xFA42, "M", "既"), - (0xFA43, "M", "暑"), - (0xFA44, "M", "梅"), - (0xFA45, "M", "海"), - (0xFA46, "M", "渚"), - (0xFA47, "M", "漢"), - (0xFA48, "M", "煮"), - (0xFA49, "M", "爫"), - (0xFA4A, "M", "琢"), - (0xFA4B, "M", "碑"), - (0xFA4C, "M", "社"), - (0xFA4D, "M", "祉"), - (0xFA4E, "M", "祈"), - (0xFA4F, "M", "祐"), - (0xFA50, "M", "祖"), - (0xFA51, "M", "祝"), - (0xFA52, "M", "禍"), - (0xFA53, "M", "禎"), - (0xFA54, "M", "穀"), - (0xFA55, "M", "突"), - (0xFA56, "M", "節"), - (0xFA57, "M", "練"), - (0xFA58, "M", "縉"), - (0xFA59, "M", "繁"), - (0xFA5A, "M", "署"), - (0xFA5B, "M", "者"), - (0xFA5C, "M", "臭"), - (0xFA5D, "M", "艹"), - (0xFA5F, "M", "著"), - (0xFA60, "M", "褐"), - (0xFA61, "M", "視"), - (0xFA62, "M", "謁"), - (0xFA63, "M", "謹"), - (0xFA64, "M", "賓"), - (0xFA65, "M", "贈"), - (0xFA66, "M", "辶"), - (0xFA67, "M", "逸"), - (0xFA68, "M", "難"), - (0xFA69, "M", "響"), - (0xFA6A, "M", "頻"), - (0xFA6B, "M", "恵"), - (0xFA6C, "M", "𤋮"), - (0xFA6D, "M", "舘"), - (0xFA6E, "X"), - (0xFA70, "M", "並"), - (0xFA71, "M", "况"), - (0xFA72, "M", "全"), - (0xFA73, "M", "侀"), - (0xFA74, "M", "充"), - (0xFA75, "M", "冀"), - (0xFA76, "M", "勇"), - (0xFA77, "M", "勺"), - (0xFA78, "M", "喝"), - (0xFA79, "M", 
"啕"), - (0xFA7A, "M", "喙"), - (0xFA7B, "M", "嗢"), - (0xFA7C, "M", "塚"), - ] - - -def _seg_43() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFA7D, "M", "墳"), - (0xFA7E, "M", "奄"), - (0xFA7F, "M", "奔"), - (0xFA80, "M", "婢"), - (0xFA81, "M", "嬨"), - (0xFA82, "M", "廒"), - (0xFA83, "M", "廙"), - (0xFA84, "M", "彩"), - (0xFA85, "M", "徭"), - (0xFA86, "M", "惘"), - (0xFA87, "M", "慎"), - (0xFA88, "M", "愈"), - (0xFA89, "M", "憎"), - (0xFA8A, "M", "慠"), - (0xFA8B, "M", "懲"), - (0xFA8C, "M", "戴"), - (0xFA8D, "M", "揄"), - (0xFA8E, "M", "搜"), - (0xFA8F, "M", "摒"), - (0xFA90, "M", "敖"), - (0xFA91, "M", "晴"), - (0xFA92, "M", "朗"), - (0xFA93, "M", "望"), - (0xFA94, "M", "杖"), - (0xFA95, "M", "歹"), - (0xFA96, "M", "殺"), - (0xFA97, "M", "流"), - (0xFA98, "M", "滛"), - (0xFA99, "M", "滋"), - (0xFA9A, "M", "漢"), - (0xFA9B, "M", "瀞"), - (0xFA9C, "M", "煮"), - (0xFA9D, "M", "瞧"), - (0xFA9E, "M", "爵"), - (0xFA9F, "M", "犯"), - (0xFAA0, "M", "猪"), - (0xFAA1, "M", "瑱"), - (0xFAA2, "M", "甆"), - (0xFAA3, "M", "画"), - (0xFAA4, "M", "瘝"), - (0xFAA5, "M", "瘟"), - (0xFAA6, "M", "益"), - (0xFAA7, "M", "盛"), - (0xFAA8, "M", "直"), - (0xFAA9, "M", "睊"), - (0xFAAA, "M", "着"), - (0xFAAB, "M", "磌"), - (0xFAAC, "M", "窱"), - (0xFAAD, "M", "節"), - (0xFAAE, "M", "类"), - (0xFAAF, "M", "絛"), - (0xFAB0, "M", "練"), - (0xFAB1, "M", "缾"), - (0xFAB2, "M", "者"), - (0xFAB3, "M", "荒"), - (0xFAB4, "M", "華"), - (0xFAB5, "M", "蝹"), - (0xFAB6, "M", "襁"), - (0xFAB7, "M", "覆"), - (0xFAB8, "M", "視"), - (0xFAB9, "M", "調"), - (0xFABA, "M", "諸"), - (0xFABB, "M", "請"), - (0xFABC, "M", "謁"), - (0xFABD, "M", "諾"), - (0xFABE, "M", "諭"), - (0xFABF, "M", "謹"), - (0xFAC0, "M", "變"), - (0xFAC1, "M", "贈"), - (0xFAC2, "M", "輸"), - (0xFAC3, "M", "遲"), - (0xFAC4, "M", "醙"), - (0xFAC5, "M", "鉶"), - (0xFAC6, "M", "陼"), - (0xFAC7, "M", "難"), - (0xFAC8, "M", "靖"), - (0xFAC9, "M", "韛"), - (0xFACA, "M", "響"), - (0xFACB, "M", "頋"), - (0xFACC, "M", "頻"), - (0xFACD, "M", "鬒"), - (0xFACE, "M", "龜"), - (0xFACF, "M", "𢡊"), - (0xFAD0, "M", "𢡄"), - (0xFAD1, "M", "𣏕"), - (0xFAD2, "M", "㮝"), - (0xFAD3, "M", "䀘"), - (0xFAD4, "M", "䀹"), - (0xFAD5, "M", "𥉉"), - (0xFAD6, "M", "𥳐"), - (0xFAD7, "M", "𧻓"), - (0xFAD8, "M", "齃"), - (0xFAD9, "M", "龎"), - (0xFADA, "X"), - (0xFB00, "M", "ff"), - (0xFB01, "M", "fi"), - (0xFB02, "M", "fl"), - (0xFB03, "M", "ffi"), - (0xFB04, "M", "ffl"), - (0xFB05, "M", "st"), - ] - - -def _seg_44() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFB07, "X"), - (0xFB13, "M", "մն"), - (0xFB14, "M", "մե"), - (0xFB15, "M", "մի"), - (0xFB16, "M", "վն"), - (0xFB17, "M", "մխ"), - (0xFB18, "X"), - (0xFB1D, "M", "יִ"), - (0xFB1E, "V"), - (0xFB1F, "M", "ײַ"), - (0xFB20, "M", "ע"), - (0xFB21, "M", "א"), - (0xFB22, "M", "ד"), - (0xFB23, "M", "ה"), - (0xFB24, "M", "כ"), - (0xFB25, "M", "ל"), - (0xFB26, "M", "ם"), - (0xFB27, "M", "ר"), - (0xFB28, "M", "ת"), - (0xFB29, "3", "+"), - (0xFB2A, "M", "שׁ"), - (0xFB2B, "M", "שׂ"), - (0xFB2C, "M", "שּׁ"), - (0xFB2D, "M", "שּׂ"), - (0xFB2E, "M", "אַ"), - (0xFB2F, "M", "אָ"), - (0xFB30, "M", "אּ"), - (0xFB31, "M", "בּ"), - (0xFB32, "M", "גּ"), - (0xFB33, "M", "דּ"), - (0xFB34, "M", "הּ"), - (0xFB35, "M", "וּ"), - (0xFB36, "M", "זּ"), - (0xFB37, "X"), - (0xFB38, "M", "טּ"), - (0xFB39, "M", "יּ"), - (0xFB3A, "M", "ךּ"), - (0xFB3B, "M", "כּ"), - (0xFB3C, "M", "לּ"), - (0xFB3D, "X"), - (0xFB3E, "M", "מּ"), - (0xFB3F, "X"), - (0xFB40, "M", "נּ"), - (0xFB41, "M", "סּ"), - (0xFB42, "X"), - (0xFB43, "M", "ףּ"), - (0xFB44, "M", "פּ"), - (0xFB45, "X"), - (0xFB46, "M", "צּ"), - (0xFB47, "M", "קּ"), - 
(0xFB48, "M", "רּ"), - (0xFB49, "M", "שּ"), - (0xFB4A, "M", "תּ"), - (0xFB4B, "M", "וֹ"), - (0xFB4C, "M", "בֿ"), - (0xFB4D, "M", "כֿ"), - (0xFB4E, "M", "פֿ"), - (0xFB4F, "M", "אל"), - (0xFB50, "M", "ٱ"), - (0xFB52, "M", "ٻ"), - (0xFB56, "M", "پ"), - (0xFB5A, "M", "ڀ"), - (0xFB5E, "M", "ٺ"), - (0xFB62, "M", "ٿ"), - (0xFB66, "M", "ٹ"), - (0xFB6A, "M", "ڤ"), - (0xFB6E, "M", "ڦ"), - (0xFB72, "M", "ڄ"), - (0xFB76, "M", "ڃ"), - (0xFB7A, "M", "چ"), - (0xFB7E, "M", "ڇ"), - (0xFB82, "M", "ڍ"), - (0xFB84, "M", "ڌ"), - (0xFB86, "M", "ڎ"), - (0xFB88, "M", "ڈ"), - (0xFB8A, "M", "ژ"), - (0xFB8C, "M", "ڑ"), - (0xFB8E, "M", "ک"), - (0xFB92, "M", "گ"), - (0xFB96, "M", "ڳ"), - (0xFB9A, "M", "ڱ"), - (0xFB9E, "M", "ں"), - (0xFBA0, "M", "ڻ"), - (0xFBA4, "M", "ۀ"), - (0xFBA6, "M", "ہ"), - (0xFBAA, "M", "ھ"), - (0xFBAE, "M", "ے"), - (0xFBB0, "M", "ۓ"), - (0xFBB2, "V"), - (0xFBC3, "X"), - (0xFBD3, "M", "ڭ"), - (0xFBD7, "M", "ۇ"), - (0xFBD9, "M", "ۆ"), - (0xFBDB, "M", "ۈ"), - (0xFBDD, "M", "ۇٴ"), - (0xFBDE, "M", "ۋ"), - (0xFBE0, "M", "ۅ"), - (0xFBE2, "M", "ۉ"), - (0xFBE4, "M", "ې"), - (0xFBE8, "M", "ى"), - ] - - -def _seg_45() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFBEA, "M", "ئا"), - (0xFBEC, "M", "ئە"), - (0xFBEE, "M", "ئو"), - (0xFBF0, "M", "ئۇ"), - (0xFBF2, "M", "ئۆ"), - (0xFBF4, "M", "ئۈ"), - (0xFBF6, "M", "ئې"), - (0xFBF9, "M", "ئى"), - (0xFBFC, "M", "ی"), - (0xFC00, "M", "ئج"), - (0xFC01, "M", "ئح"), - (0xFC02, "M", "ئم"), - (0xFC03, "M", "ئى"), - (0xFC04, "M", "ئي"), - (0xFC05, "M", "بج"), - (0xFC06, "M", "بح"), - (0xFC07, "M", "بخ"), - (0xFC08, "M", "بم"), - (0xFC09, "M", "بى"), - (0xFC0A, "M", "بي"), - (0xFC0B, "M", "تج"), - (0xFC0C, "M", "تح"), - (0xFC0D, "M", "تخ"), - (0xFC0E, "M", "تم"), - (0xFC0F, "M", "تى"), - (0xFC10, "M", "تي"), - (0xFC11, "M", "ثج"), - (0xFC12, "M", "ثم"), - (0xFC13, "M", "ثى"), - (0xFC14, "M", "ثي"), - (0xFC15, "M", "جح"), - (0xFC16, "M", "جم"), - (0xFC17, "M", "حج"), - (0xFC18, "M", "حم"), - (0xFC19, "M", "خج"), - (0xFC1A, "M", "خح"), - (0xFC1B, "M", "خم"), - (0xFC1C, "M", "سج"), - (0xFC1D, "M", "سح"), - (0xFC1E, "M", "سخ"), - (0xFC1F, "M", "سم"), - (0xFC20, "M", "صح"), - (0xFC21, "M", "صم"), - (0xFC22, "M", "ضج"), - (0xFC23, "M", "ضح"), - (0xFC24, "M", "ضخ"), - (0xFC25, "M", "ضم"), - (0xFC26, "M", "طح"), - (0xFC27, "M", "طم"), - (0xFC28, "M", "ظم"), - (0xFC29, "M", "عج"), - (0xFC2A, "M", "عم"), - (0xFC2B, "M", "غج"), - (0xFC2C, "M", "غم"), - (0xFC2D, "M", "فج"), - (0xFC2E, "M", "فح"), - (0xFC2F, "M", "فخ"), - (0xFC30, "M", "فم"), - (0xFC31, "M", "فى"), - (0xFC32, "M", "في"), - (0xFC33, "M", "قح"), - (0xFC34, "M", "قم"), - (0xFC35, "M", "قى"), - (0xFC36, "M", "قي"), - (0xFC37, "M", "كا"), - (0xFC38, "M", "كج"), - (0xFC39, "M", "كح"), - (0xFC3A, "M", "كخ"), - (0xFC3B, "M", "كل"), - (0xFC3C, "M", "كم"), - (0xFC3D, "M", "كى"), - (0xFC3E, "M", "كي"), - (0xFC3F, "M", "لج"), - (0xFC40, "M", "لح"), - (0xFC41, "M", "لخ"), - (0xFC42, "M", "لم"), - (0xFC43, "M", "لى"), - (0xFC44, "M", "لي"), - (0xFC45, "M", "مج"), - (0xFC46, "M", "مح"), - (0xFC47, "M", "مخ"), - (0xFC48, "M", "مم"), - (0xFC49, "M", "مى"), - (0xFC4A, "M", "مي"), - (0xFC4B, "M", "نج"), - (0xFC4C, "M", "نح"), - (0xFC4D, "M", "نخ"), - (0xFC4E, "M", "نم"), - (0xFC4F, "M", "نى"), - (0xFC50, "M", "ني"), - (0xFC51, "M", "هج"), - (0xFC52, "M", "هم"), - (0xFC53, "M", "هى"), - (0xFC54, "M", "هي"), - (0xFC55, "M", "يج"), - (0xFC56, "M", "يح"), - (0xFC57, "M", "يخ"), - (0xFC58, "M", "يم"), - (0xFC59, "M", "يى"), - (0xFC5A, "M", "يي"), - ] - - -def _seg_46() -> List[Union[Tuple[int, str], Tuple[int, str, 
str]]]: - return [ - (0xFC5B, "M", "ذٰ"), - (0xFC5C, "M", "رٰ"), - (0xFC5D, "M", "ىٰ"), - (0xFC5E, "3", " ٌّ"), - (0xFC5F, "3", " ٍّ"), - (0xFC60, "3", " َّ"), - (0xFC61, "3", " ُّ"), - (0xFC62, "3", " ِّ"), - (0xFC63, "3", " ّٰ"), - (0xFC64, "M", "ئر"), - (0xFC65, "M", "ئز"), - (0xFC66, "M", "ئم"), - (0xFC67, "M", "ئن"), - (0xFC68, "M", "ئى"), - (0xFC69, "M", "ئي"), - (0xFC6A, "M", "بر"), - (0xFC6B, "M", "بز"), - (0xFC6C, "M", "بم"), - (0xFC6D, "M", "بن"), - (0xFC6E, "M", "بى"), - (0xFC6F, "M", "بي"), - (0xFC70, "M", "تر"), - (0xFC71, "M", "تز"), - (0xFC72, "M", "تم"), - (0xFC73, "M", "تن"), - (0xFC74, "M", "تى"), - (0xFC75, "M", "تي"), - (0xFC76, "M", "ثر"), - (0xFC77, "M", "ثز"), - (0xFC78, "M", "ثم"), - (0xFC79, "M", "ثن"), - (0xFC7A, "M", "ثى"), - (0xFC7B, "M", "ثي"), - (0xFC7C, "M", "فى"), - (0xFC7D, "M", "في"), - (0xFC7E, "M", "قى"), - (0xFC7F, "M", "قي"), - (0xFC80, "M", "كا"), - (0xFC81, "M", "كل"), - (0xFC82, "M", "كم"), - (0xFC83, "M", "كى"), - (0xFC84, "M", "كي"), - (0xFC85, "M", "لم"), - (0xFC86, "M", "لى"), - (0xFC87, "M", "لي"), - (0xFC88, "M", "ما"), - (0xFC89, "M", "مم"), - (0xFC8A, "M", "نر"), - (0xFC8B, "M", "نز"), - (0xFC8C, "M", "نم"), - (0xFC8D, "M", "نن"), - (0xFC8E, "M", "نى"), - (0xFC8F, "M", "ني"), - (0xFC90, "M", "ىٰ"), - (0xFC91, "M", "ير"), - (0xFC92, "M", "يز"), - (0xFC93, "M", "يم"), - (0xFC94, "M", "ين"), - (0xFC95, "M", "يى"), - (0xFC96, "M", "يي"), - (0xFC97, "M", "ئج"), - (0xFC98, "M", "ئح"), - (0xFC99, "M", "ئخ"), - (0xFC9A, "M", "ئم"), - (0xFC9B, "M", "ئه"), - (0xFC9C, "M", "بج"), - (0xFC9D, "M", "بح"), - (0xFC9E, "M", "بخ"), - (0xFC9F, "M", "بم"), - (0xFCA0, "M", "به"), - (0xFCA1, "M", "تج"), - (0xFCA2, "M", "تح"), - (0xFCA3, "M", "تخ"), - (0xFCA4, "M", "تم"), - (0xFCA5, "M", "ته"), - (0xFCA6, "M", "ثم"), - (0xFCA7, "M", "جح"), - (0xFCA8, "M", "جم"), - (0xFCA9, "M", "حج"), - (0xFCAA, "M", "حم"), - (0xFCAB, "M", "خج"), - (0xFCAC, "M", "خم"), - (0xFCAD, "M", "سج"), - (0xFCAE, "M", "سح"), - (0xFCAF, "M", "سخ"), - (0xFCB0, "M", "سم"), - (0xFCB1, "M", "صح"), - (0xFCB2, "M", "صخ"), - (0xFCB3, "M", "صم"), - (0xFCB4, "M", "ضج"), - (0xFCB5, "M", "ضح"), - (0xFCB6, "M", "ضخ"), - (0xFCB7, "M", "ضم"), - (0xFCB8, "M", "طح"), - (0xFCB9, "M", "ظم"), - (0xFCBA, "M", "عج"), - (0xFCBB, "M", "عم"), - (0xFCBC, "M", "غج"), - (0xFCBD, "M", "غم"), - (0xFCBE, "M", "فج"), - ] - - -def _seg_47() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFCBF, "M", "فح"), - (0xFCC0, "M", "فخ"), - (0xFCC1, "M", "فم"), - (0xFCC2, "M", "قح"), - (0xFCC3, "M", "قم"), - (0xFCC4, "M", "كج"), - (0xFCC5, "M", "كح"), - (0xFCC6, "M", "كخ"), - (0xFCC7, "M", "كل"), - (0xFCC8, "M", "كم"), - (0xFCC9, "M", "لج"), - (0xFCCA, "M", "لح"), - (0xFCCB, "M", "لخ"), - (0xFCCC, "M", "لم"), - (0xFCCD, "M", "له"), - (0xFCCE, "M", "مج"), - (0xFCCF, "M", "مح"), - (0xFCD0, "M", "مخ"), - (0xFCD1, "M", "مم"), - (0xFCD2, "M", "نج"), - (0xFCD3, "M", "نح"), - (0xFCD4, "M", "نخ"), - (0xFCD5, "M", "نم"), - (0xFCD6, "M", "نه"), - (0xFCD7, "M", "هج"), - (0xFCD8, "M", "هم"), - (0xFCD9, "M", "هٰ"), - (0xFCDA, "M", "يج"), - (0xFCDB, "M", "يح"), - (0xFCDC, "M", "يخ"), - (0xFCDD, "M", "يم"), - (0xFCDE, "M", "يه"), - (0xFCDF, "M", "ئم"), - (0xFCE0, "M", "ئه"), - (0xFCE1, "M", "بم"), - (0xFCE2, "M", "به"), - (0xFCE3, "M", "تم"), - (0xFCE4, "M", "ته"), - (0xFCE5, "M", "ثم"), - (0xFCE6, "M", "ثه"), - (0xFCE7, "M", "سم"), - (0xFCE8, "M", "سه"), - (0xFCE9, "M", "شم"), - (0xFCEA, "M", "شه"), - (0xFCEB, "M", "كل"), - (0xFCEC, "M", "كم"), - (0xFCED, "M", "لم"), - (0xFCEE, "M", "نم"), - (0xFCEF, "M", "نه"), - (0xFCF0, 
"M", "يم"), - (0xFCF1, "M", "يه"), - (0xFCF2, "M", "ـَّ"), - (0xFCF3, "M", "ـُّ"), - (0xFCF4, "M", "ـِّ"), - (0xFCF5, "M", "طى"), - (0xFCF6, "M", "طي"), - (0xFCF7, "M", "عى"), - (0xFCF8, "M", "عي"), - (0xFCF9, "M", "غى"), - (0xFCFA, "M", "غي"), - (0xFCFB, "M", "سى"), - (0xFCFC, "M", "سي"), - (0xFCFD, "M", "شى"), - (0xFCFE, "M", "شي"), - (0xFCFF, "M", "حى"), - (0xFD00, "M", "حي"), - (0xFD01, "M", "جى"), - (0xFD02, "M", "جي"), - (0xFD03, "M", "خى"), - (0xFD04, "M", "خي"), - (0xFD05, "M", "صى"), - (0xFD06, "M", "صي"), - (0xFD07, "M", "ضى"), - (0xFD08, "M", "ضي"), - (0xFD09, "M", "شج"), - (0xFD0A, "M", "شح"), - (0xFD0B, "M", "شخ"), - (0xFD0C, "M", "شم"), - (0xFD0D, "M", "شر"), - (0xFD0E, "M", "سر"), - (0xFD0F, "M", "صر"), - (0xFD10, "M", "ضر"), - (0xFD11, "M", "طى"), - (0xFD12, "M", "طي"), - (0xFD13, "M", "عى"), - (0xFD14, "M", "عي"), - (0xFD15, "M", "غى"), - (0xFD16, "M", "غي"), - (0xFD17, "M", "سى"), - (0xFD18, "M", "سي"), - (0xFD19, "M", "شى"), - (0xFD1A, "M", "شي"), - (0xFD1B, "M", "حى"), - (0xFD1C, "M", "حي"), - (0xFD1D, "M", "جى"), - (0xFD1E, "M", "جي"), - (0xFD1F, "M", "خى"), - (0xFD20, "M", "خي"), - (0xFD21, "M", "صى"), - (0xFD22, "M", "صي"), - ] - - -def _seg_48() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFD23, "M", "ضى"), - (0xFD24, "M", "ضي"), - (0xFD25, "M", "شج"), - (0xFD26, "M", "شح"), - (0xFD27, "M", "شخ"), - (0xFD28, "M", "شم"), - (0xFD29, "M", "شر"), - (0xFD2A, "M", "سر"), - (0xFD2B, "M", "صر"), - (0xFD2C, "M", "ضر"), - (0xFD2D, "M", "شج"), - (0xFD2E, "M", "شح"), - (0xFD2F, "M", "شخ"), - (0xFD30, "M", "شم"), - (0xFD31, "M", "سه"), - (0xFD32, "M", "شه"), - (0xFD33, "M", "طم"), - (0xFD34, "M", "سج"), - (0xFD35, "M", "سح"), - (0xFD36, "M", "سخ"), - (0xFD37, "M", "شج"), - (0xFD38, "M", "شح"), - (0xFD39, "M", "شخ"), - (0xFD3A, "M", "طم"), - (0xFD3B, "M", "ظم"), - (0xFD3C, "M", "اً"), - (0xFD3E, "V"), - (0xFD50, "M", "تجم"), - (0xFD51, "M", "تحج"), - (0xFD53, "M", "تحم"), - (0xFD54, "M", "تخم"), - (0xFD55, "M", "تمج"), - (0xFD56, "M", "تمح"), - (0xFD57, "M", "تمخ"), - (0xFD58, "M", "جمح"), - (0xFD5A, "M", "حمي"), - (0xFD5B, "M", "حمى"), - (0xFD5C, "M", "سحج"), - (0xFD5D, "M", "سجح"), - (0xFD5E, "M", "سجى"), - (0xFD5F, "M", "سمح"), - (0xFD61, "M", "سمج"), - (0xFD62, "M", "سمم"), - (0xFD64, "M", "صحح"), - (0xFD66, "M", "صمم"), - (0xFD67, "M", "شحم"), - (0xFD69, "M", "شجي"), - (0xFD6A, "M", "شمخ"), - (0xFD6C, "M", "شمم"), - (0xFD6E, "M", "ضحى"), - (0xFD6F, "M", "ضخم"), - (0xFD71, "M", "طمح"), - (0xFD73, "M", "طمم"), - (0xFD74, "M", "طمي"), - (0xFD75, "M", "عجم"), - (0xFD76, "M", "عمم"), - (0xFD78, "M", "عمى"), - (0xFD79, "M", "غمم"), - (0xFD7A, "M", "غمي"), - (0xFD7B, "M", "غمى"), - (0xFD7C, "M", "فخم"), - (0xFD7E, "M", "قمح"), - (0xFD7F, "M", "قمم"), - (0xFD80, "M", "لحم"), - (0xFD81, "M", "لحي"), - (0xFD82, "M", "لحى"), - (0xFD83, "M", "لجج"), - (0xFD85, "M", "لخم"), - (0xFD87, "M", "لمح"), - (0xFD89, "M", "محج"), - (0xFD8A, "M", "محم"), - (0xFD8B, "M", "محي"), - (0xFD8C, "M", "مجح"), - (0xFD8D, "M", "مجم"), - (0xFD8E, "M", "مخج"), - (0xFD8F, "M", "مخم"), - (0xFD90, "X"), - (0xFD92, "M", "مجخ"), - (0xFD93, "M", "همج"), - (0xFD94, "M", "همم"), - (0xFD95, "M", "نحم"), - (0xFD96, "M", "نحى"), - (0xFD97, "M", "نجم"), - (0xFD99, "M", "نجى"), - (0xFD9A, "M", "نمي"), - (0xFD9B, "M", "نمى"), - (0xFD9C, "M", "يمم"), - (0xFD9E, "M", "بخي"), - (0xFD9F, "M", "تجي"), - (0xFDA0, "M", "تجى"), - (0xFDA1, "M", "تخي"), - (0xFDA2, "M", "تخى"), - (0xFDA3, "M", "تمي"), - (0xFDA4, "M", "تمى"), - (0xFDA5, "M", "جمي"), - (0xFDA6, "M", "جحى"), - (0xFDA7, "M", "جمى"), - (0xFDA8, 
"M", "سخى"), - (0xFDA9, "M", "صحي"), - (0xFDAA, "M", "شحي"), - ] - - -def _seg_49() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFDAB, "M", "ضحي"), - (0xFDAC, "M", "لجي"), - (0xFDAD, "M", "لمي"), - (0xFDAE, "M", "يحي"), - (0xFDAF, "M", "يجي"), - (0xFDB0, "M", "يمي"), - (0xFDB1, "M", "ممي"), - (0xFDB2, "M", "قمي"), - (0xFDB3, "M", "نحي"), - (0xFDB4, "M", "قمح"), - (0xFDB5, "M", "لحم"), - (0xFDB6, "M", "عمي"), - (0xFDB7, "M", "كمي"), - (0xFDB8, "M", "نجح"), - (0xFDB9, "M", "مخي"), - (0xFDBA, "M", "لجم"), - (0xFDBB, "M", "كمم"), - (0xFDBC, "M", "لجم"), - (0xFDBD, "M", "نجح"), - (0xFDBE, "M", "جحي"), - (0xFDBF, "M", "حجي"), - (0xFDC0, "M", "مجي"), - (0xFDC1, "M", "فمي"), - (0xFDC2, "M", "بحي"), - (0xFDC3, "M", "كمم"), - (0xFDC4, "M", "عجم"), - (0xFDC5, "M", "صمم"), - (0xFDC6, "M", "سخي"), - (0xFDC7, "M", "نجي"), - (0xFDC8, "X"), - (0xFDCF, "V"), - (0xFDD0, "X"), - (0xFDF0, "M", "صلے"), - (0xFDF1, "M", "قلے"), - (0xFDF2, "M", "الله"), - (0xFDF3, "M", "اكبر"), - (0xFDF4, "M", "محمد"), - (0xFDF5, "M", "صلعم"), - (0xFDF6, "M", "رسول"), - (0xFDF7, "M", "عليه"), - (0xFDF8, "M", "وسلم"), - (0xFDF9, "M", "صلى"), - (0xFDFA, "3", "صلى الله عليه وسلم"), - (0xFDFB, "3", "جل جلاله"), - (0xFDFC, "M", "ریال"), - (0xFDFD, "V"), - (0xFE00, "I"), - (0xFE10, "3", ","), - (0xFE11, "M", "、"), - (0xFE12, "X"), - (0xFE13, "3", ":"), - (0xFE14, "3", ";"), - (0xFE15, "3", "!"), - (0xFE16, "3", "?"), - (0xFE17, "M", "〖"), - (0xFE18, "M", "〗"), - (0xFE19, "X"), - (0xFE20, "V"), - (0xFE30, "X"), - (0xFE31, "M", "—"), - (0xFE32, "M", "–"), - (0xFE33, "3", "_"), - (0xFE35, "3", "("), - (0xFE36, "3", ")"), - (0xFE37, "3", "{"), - (0xFE38, "3", "}"), - (0xFE39, "M", "〔"), - (0xFE3A, "M", "〕"), - (0xFE3B, "M", "【"), - (0xFE3C, "M", "】"), - (0xFE3D, "M", "《"), - (0xFE3E, "M", "》"), - (0xFE3F, "M", "〈"), - (0xFE40, "M", "〉"), - (0xFE41, "M", "「"), - (0xFE42, "M", "」"), - (0xFE43, "M", "『"), - (0xFE44, "M", "』"), - (0xFE45, "V"), - (0xFE47, "3", "["), - (0xFE48, "3", "]"), - (0xFE49, "3", " ̅"), - (0xFE4D, "3", "_"), - (0xFE50, "3", ","), - (0xFE51, "M", "、"), - (0xFE52, "X"), - (0xFE54, "3", ";"), - (0xFE55, "3", ":"), - (0xFE56, "3", "?"), - (0xFE57, "3", "!"), - (0xFE58, "M", "—"), - (0xFE59, "3", "("), - (0xFE5A, "3", ")"), - (0xFE5B, "3", "{"), - (0xFE5C, "3", "}"), - (0xFE5D, "M", "〔"), - (0xFE5E, "M", "〕"), - (0xFE5F, "3", "#"), - (0xFE60, "3", "&"), - (0xFE61, "3", "*"), - ] - - -def _seg_50() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFE62, "3", "+"), - (0xFE63, "M", "-"), - (0xFE64, "3", "<"), - (0xFE65, "3", ">"), - (0xFE66, "3", "="), - (0xFE67, "X"), - (0xFE68, "3", "\\"), - (0xFE69, "3", "$"), - (0xFE6A, "3", "%"), - (0xFE6B, "3", "@"), - (0xFE6C, "X"), - (0xFE70, "3", " ً"), - (0xFE71, "M", "ـً"), - (0xFE72, "3", " ٌ"), - (0xFE73, "V"), - (0xFE74, "3", " ٍ"), - (0xFE75, "X"), - (0xFE76, "3", " َ"), - (0xFE77, "M", "ـَ"), - (0xFE78, "3", " ُ"), - (0xFE79, "M", "ـُ"), - (0xFE7A, "3", " ِ"), - (0xFE7B, "M", "ـِ"), - (0xFE7C, "3", " ّ"), - (0xFE7D, "M", "ـّ"), - (0xFE7E, "3", " ْ"), - (0xFE7F, "M", "ـْ"), - (0xFE80, "M", "ء"), - (0xFE81, "M", "آ"), - (0xFE83, "M", "أ"), - (0xFE85, "M", "ؤ"), - (0xFE87, "M", "إ"), - (0xFE89, "M", "ئ"), - (0xFE8D, "M", "ا"), - (0xFE8F, "M", "ب"), - (0xFE93, "M", "ة"), - (0xFE95, "M", "ت"), - (0xFE99, "M", "ث"), - (0xFE9D, "M", "ج"), - (0xFEA1, "M", "ح"), - (0xFEA5, "M", "خ"), - (0xFEA9, "M", "د"), - (0xFEAB, "M", "ذ"), - (0xFEAD, "M", "ر"), - (0xFEAF, "M", "ز"), - (0xFEB1, "M", "س"), - (0xFEB5, "M", "ش"), - (0xFEB9, "M", "ص"), - 
(0xFEBD, "M", "ض"), - (0xFEC1, "M", "ط"), - (0xFEC5, "M", "ظ"), - (0xFEC9, "M", "ع"), - (0xFECD, "M", "غ"), - (0xFED1, "M", "ف"), - (0xFED5, "M", "ق"), - (0xFED9, "M", "ك"), - (0xFEDD, "M", "ل"), - (0xFEE1, "M", "م"), - (0xFEE5, "M", "ن"), - (0xFEE9, "M", "ه"), - (0xFEED, "M", "و"), - (0xFEEF, "M", "ى"), - (0xFEF1, "M", "ي"), - (0xFEF5, "M", "لآ"), - (0xFEF7, "M", "لأ"), - (0xFEF9, "M", "لإ"), - (0xFEFB, "M", "لا"), - (0xFEFD, "X"), - (0xFEFF, "I"), - (0xFF00, "X"), - (0xFF01, "3", "!"), - (0xFF02, "3", '"'), - (0xFF03, "3", "#"), - (0xFF04, "3", "$"), - (0xFF05, "3", "%"), - (0xFF06, "3", "&"), - (0xFF07, "3", "'"), - (0xFF08, "3", "("), - (0xFF09, "3", ")"), - (0xFF0A, "3", "*"), - (0xFF0B, "3", "+"), - (0xFF0C, "3", ","), - (0xFF0D, "M", "-"), - (0xFF0E, "M", "."), - (0xFF0F, "3", "/"), - (0xFF10, "M", "0"), - (0xFF11, "M", "1"), - (0xFF12, "M", "2"), - (0xFF13, "M", "3"), - (0xFF14, "M", "4"), - (0xFF15, "M", "5"), - (0xFF16, "M", "6"), - (0xFF17, "M", "7"), - (0xFF18, "M", "8"), - (0xFF19, "M", "9"), - (0xFF1A, "3", ":"), - (0xFF1B, "3", ";"), - (0xFF1C, "3", "<"), - (0xFF1D, "3", "="), - (0xFF1E, "3", ">"), - ] - - -def _seg_51() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF1F, "3", "?"), - (0xFF20, "3", "@"), - (0xFF21, "M", "a"), - (0xFF22, "M", "b"), - (0xFF23, "M", "c"), - (0xFF24, "M", "d"), - (0xFF25, "M", "e"), - (0xFF26, "M", "f"), - (0xFF27, "M", "g"), - (0xFF28, "M", "h"), - (0xFF29, "M", "i"), - (0xFF2A, "M", "j"), - (0xFF2B, "M", "k"), - (0xFF2C, "M", "l"), - (0xFF2D, "M", "m"), - (0xFF2E, "M", "n"), - (0xFF2F, "M", "o"), - (0xFF30, "M", "p"), - (0xFF31, "M", "q"), - (0xFF32, "M", "r"), - (0xFF33, "M", "s"), - (0xFF34, "M", "t"), - (0xFF35, "M", "u"), - (0xFF36, "M", "v"), - (0xFF37, "M", "w"), - (0xFF38, "M", "x"), - (0xFF39, "M", "y"), - (0xFF3A, "M", "z"), - (0xFF3B, "3", "["), - (0xFF3C, "3", "\\"), - (0xFF3D, "3", "]"), - (0xFF3E, "3", "^"), - (0xFF3F, "3", "_"), - (0xFF40, "3", "`"), - (0xFF41, "M", "a"), - (0xFF42, "M", "b"), - (0xFF43, "M", "c"), - (0xFF44, "M", "d"), - (0xFF45, "M", "e"), - (0xFF46, "M", "f"), - (0xFF47, "M", "g"), - (0xFF48, "M", "h"), - (0xFF49, "M", "i"), - (0xFF4A, "M", "j"), - (0xFF4B, "M", "k"), - (0xFF4C, "M", "l"), - (0xFF4D, "M", "m"), - (0xFF4E, "M", "n"), - (0xFF4F, "M", "o"), - (0xFF50, "M", "p"), - (0xFF51, "M", "q"), - (0xFF52, "M", "r"), - (0xFF53, "M", "s"), - (0xFF54, "M", "t"), - (0xFF55, "M", "u"), - (0xFF56, "M", "v"), - (0xFF57, "M", "w"), - (0xFF58, "M", "x"), - (0xFF59, "M", "y"), - (0xFF5A, "M", "z"), - (0xFF5B, "3", "{"), - (0xFF5C, "3", "|"), - (0xFF5D, "3", "}"), - (0xFF5E, "3", "~"), - (0xFF5F, "M", "⦅"), - (0xFF60, "M", "⦆"), - (0xFF61, "M", "."), - (0xFF62, "M", "「"), - (0xFF63, "M", "」"), - (0xFF64, "M", "、"), - (0xFF65, "M", "・"), - (0xFF66, "M", "ヲ"), - (0xFF67, "M", "ァ"), - (0xFF68, "M", "ィ"), - (0xFF69, "M", "ゥ"), - (0xFF6A, "M", "ェ"), - (0xFF6B, "M", "ォ"), - (0xFF6C, "M", "ャ"), - (0xFF6D, "M", "ュ"), - (0xFF6E, "M", "ョ"), - (0xFF6F, "M", "ッ"), - (0xFF70, "M", "ー"), - (0xFF71, "M", "ア"), - (0xFF72, "M", "イ"), - (0xFF73, "M", "ウ"), - (0xFF74, "M", "エ"), - (0xFF75, "M", "オ"), - (0xFF76, "M", "カ"), - (0xFF77, "M", "キ"), - (0xFF78, "M", "ク"), - (0xFF79, "M", "ケ"), - (0xFF7A, "M", "コ"), - (0xFF7B, "M", "サ"), - (0xFF7C, "M", "シ"), - (0xFF7D, "M", "ス"), - (0xFF7E, "M", "セ"), - (0xFF7F, "M", "ソ"), - (0xFF80, "M", "タ"), - (0xFF81, "M", "チ"), - (0xFF82, "M", "ツ"), - ] - - -def _seg_52() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFF83, "M", "テ"), - (0xFF84, "M", "ト"), 
- (0xFF85, "M", "ナ"), - (0xFF86, "M", "ニ"), - (0xFF87, "M", "ヌ"), - (0xFF88, "M", "ネ"), - (0xFF89, "M", "ノ"), - (0xFF8A, "M", "ハ"), - (0xFF8B, "M", "ヒ"), - (0xFF8C, "M", "フ"), - (0xFF8D, "M", "ヘ"), - (0xFF8E, "M", "ホ"), - (0xFF8F, "M", "マ"), - (0xFF90, "M", "ミ"), - (0xFF91, "M", "ム"), - (0xFF92, "M", "メ"), - (0xFF93, "M", "モ"), - (0xFF94, "M", "ヤ"), - (0xFF95, "M", "ユ"), - (0xFF96, "M", "ヨ"), - (0xFF97, "M", "ラ"), - (0xFF98, "M", "リ"), - (0xFF99, "M", "ル"), - (0xFF9A, "M", "レ"), - (0xFF9B, "M", "ロ"), - (0xFF9C, "M", "ワ"), - (0xFF9D, "M", "ン"), - (0xFF9E, "M", "゙"), - (0xFF9F, "M", "゚"), - (0xFFA0, "X"), - (0xFFA1, "M", "ᄀ"), - (0xFFA2, "M", "ᄁ"), - (0xFFA3, "M", "ᆪ"), - (0xFFA4, "M", "ᄂ"), - (0xFFA5, "M", "ᆬ"), - (0xFFA6, "M", "ᆭ"), - (0xFFA7, "M", "ᄃ"), - (0xFFA8, "M", "ᄄ"), - (0xFFA9, "M", "ᄅ"), - (0xFFAA, "M", "ᆰ"), - (0xFFAB, "M", "ᆱ"), - (0xFFAC, "M", "ᆲ"), - (0xFFAD, "M", "ᆳ"), - (0xFFAE, "M", "ᆴ"), - (0xFFAF, "M", "ᆵ"), - (0xFFB0, "M", "ᄚ"), - (0xFFB1, "M", "ᄆ"), - (0xFFB2, "M", "ᄇ"), - (0xFFB3, "M", "ᄈ"), - (0xFFB4, "M", "ᄡ"), - (0xFFB5, "M", "ᄉ"), - (0xFFB6, "M", "ᄊ"), - (0xFFB7, "M", "ᄋ"), - (0xFFB8, "M", "ᄌ"), - (0xFFB9, "M", "ᄍ"), - (0xFFBA, "M", "ᄎ"), - (0xFFBB, "M", "ᄏ"), - (0xFFBC, "M", "ᄐ"), - (0xFFBD, "M", "ᄑ"), - (0xFFBE, "M", "ᄒ"), - (0xFFBF, "X"), - (0xFFC2, "M", "ᅡ"), - (0xFFC3, "M", "ᅢ"), - (0xFFC4, "M", "ᅣ"), - (0xFFC5, "M", "ᅤ"), - (0xFFC6, "M", "ᅥ"), - (0xFFC7, "M", "ᅦ"), - (0xFFC8, "X"), - (0xFFCA, "M", "ᅧ"), - (0xFFCB, "M", "ᅨ"), - (0xFFCC, "M", "ᅩ"), - (0xFFCD, "M", "ᅪ"), - (0xFFCE, "M", "ᅫ"), - (0xFFCF, "M", "ᅬ"), - (0xFFD0, "X"), - (0xFFD2, "M", "ᅭ"), - (0xFFD3, "M", "ᅮ"), - (0xFFD4, "M", "ᅯ"), - (0xFFD5, "M", "ᅰ"), - (0xFFD6, "M", "ᅱ"), - (0xFFD7, "M", "ᅲ"), - (0xFFD8, "X"), - (0xFFDA, "M", "ᅳ"), - (0xFFDB, "M", "ᅴ"), - (0xFFDC, "M", "ᅵ"), - (0xFFDD, "X"), - (0xFFE0, "M", "¢"), - (0xFFE1, "M", "£"), - (0xFFE2, "M", "¬"), - (0xFFE3, "3", " ̄"), - (0xFFE4, "M", "¦"), - (0xFFE5, "M", "¥"), - (0xFFE6, "M", "₩"), - (0xFFE7, "X"), - (0xFFE8, "M", "│"), - (0xFFE9, "M", "←"), - (0xFFEA, "M", "↑"), - (0xFFEB, "M", "→"), - (0xFFEC, "M", "↓"), - (0xFFED, "M", "■"), - ] - - -def _seg_53() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0xFFEE, "M", "○"), - (0xFFEF, "X"), - (0x10000, "V"), - (0x1000C, "X"), - (0x1000D, "V"), - (0x10027, "X"), - (0x10028, "V"), - (0x1003B, "X"), - (0x1003C, "V"), - (0x1003E, "X"), - (0x1003F, "V"), - (0x1004E, "X"), - (0x10050, "V"), - (0x1005E, "X"), - (0x10080, "V"), - (0x100FB, "X"), - (0x10100, "V"), - (0x10103, "X"), - (0x10107, "V"), - (0x10134, "X"), - (0x10137, "V"), - (0x1018F, "X"), - (0x10190, "V"), - (0x1019D, "X"), - (0x101A0, "V"), - (0x101A1, "X"), - (0x101D0, "V"), - (0x101FE, "X"), - (0x10280, "V"), - (0x1029D, "X"), - (0x102A0, "V"), - (0x102D1, "X"), - (0x102E0, "V"), - (0x102FC, "X"), - (0x10300, "V"), - (0x10324, "X"), - (0x1032D, "V"), - (0x1034B, "X"), - (0x10350, "V"), - (0x1037B, "X"), - (0x10380, "V"), - (0x1039E, "X"), - (0x1039F, "V"), - (0x103C4, "X"), - (0x103C8, "V"), - (0x103D6, "X"), - (0x10400, "M", "𐐨"), - (0x10401, "M", "𐐩"), - (0x10402, "M", "𐐪"), - (0x10403, "M", "𐐫"), - (0x10404, "M", "𐐬"), - (0x10405, "M", "𐐭"), - (0x10406, "M", "𐐮"), - (0x10407, "M", "𐐯"), - (0x10408, "M", "𐐰"), - (0x10409, "M", "𐐱"), - (0x1040A, "M", "𐐲"), - (0x1040B, "M", "𐐳"), - (0x1040C, "M", "𐐴"), - (0x1040D, "M", "𐐵"), - (0x1040E, "M", "𐐶"), - (0x1040F, "M", "𐐷"), - (0x10410, "M", "𐐸"), - (0x10411, "M", "𐐹"), - (0x10412, "M", "𐐺"), - (0x10413, "M", "𐐻"), - (0x10414, "M", "𐐼"), - (0x10415, "M", "𐐽"), - 
(0x10416, "M", "𐐾"), - (0x10417, "M", "𐐿"), - (0x10418, "M", "𐑀"), - (0x10419, "M", "𐑁"), - (0x1041A, "M", "𐑂"), - (0x1041B, "M", "𐑃"), - (0x1041C, "M", "𐑄"), - (0x1041D, "M", "𐑅"), - (0x1041E, "M", "𐑆"), - (0x1041F, "M", "𐑇"), - (0x10420, "M", "𐑈"), - (0x10421, "M", "𐑉"), - (0x10422, "M", "𐑊"), - (0x10423, "M", "𐑋"), - (0x10424, "M", "𐑌"), - (0x10425, "M", "𐑍"), - (0x10426, "M", "𐑎"), - (0x10427, "M", "𐑏"), - (0x10428, "V"), - (0x1049E, "X"), - (0x104A0, "V"), - (0x104AA, "X"), - (0x104B0, "M", "𐓘"), - (0x104B1, "M", "𐓙"), - (0x104B2, "M", "𐓚"), - (0x104B3, "M", "𐓛"), - (0x104B4, "M", "𐓜"), - (0x104B5, "M", "𐓝"), - (0x104B6, "M", "𐓞"), - (0x104B7, "M", "𐓟"), - (0x104B8, "M", "𐓠"), - (0x104B9, "M", "𐓡"), - ] - - -def _seg_54() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x104BA, "M", "𐓢"), - (0x104BB, "M", "𐓣"), - (0x104BC, "M", "𐓤"), - (0x104BD, "M", "𐓥"), - (0x104BE, "M", "𐓦"), - (0x104BF, "M", "𐓧"), - (0x104C0, "M", "𐓨"), - (0x104C1, "M", "𐓩"), - (0x104C2, "M", "𐓪"), - (0x104C3, "M", "𐓫"), - (0x104C4, "M", "𐓬"), - (0x104C5, "M", "𐓭"), - (0x104C6, "M", "𐓮"), - (0x104C7, "M", "𐓯"), - (0x104C8, "M", "𐓰"), - (0x104C9, "M", "𐓱"), - (0x104CA, "M", "𐓲"), - (0x104CB, "M", "𐓳"), - (0x104CC, "M", "𐓴"), - (0x104CD, "M", "𐓵"), - (0x104CE, "M", "𐓶"), - (0x104CF, "M", "𐓷"), - (0x104D0, "M", "𐓸"), - (0x104D1, "M", "𐓹"), - (0x104D2, "M", "𐓺"), - (0x104D3, "M", "𐓻"), - (0x104D4, "X"), - (0x104D8, "V"), - (0x104FC, "X"), - (0x10500, "V"), - (0x10528, "X"), - (0x10530, "V"), - (0x10564, "X"), - (0x1056F, "V"), - (0x10570, "M", "𐖗"), - (0x10571, "M", "𐖘"), - (0x10572, "M", "𐖙"), - (0x10573, "M", "𐖚"), - (0x10574, "M", "𐖛"), - (0x10575, "M", "𐖜"), - (0x10576, "M", "𐖝"), - (0x10577, "M", "𐖞"), - (0x10578, "M", "𐖟"), - (0x10579, "M", "𐖠"), - (0x1057A, "M", "𐖡"), - (0x1057B, "X"), - (0x1057C, "M", "𐖣"), - (0x1057D, "M", "𐖤"), - (0x1057E, "M", "𐖥"), - (0x1057F, "M", "𐖦"), - (0x10580, "M", "𐖧"), - (0x10581, "M", "𐖨"), - (0x10582, "M", "𐖩"), - (0x10583, "M", "𐖪"), - (0x10584, "M", "𐖫"), - (0x10585, "M", "𐖬"), - (0x10586, "M", "𐖭"), - (0x10587, "M", "𐖮"), - (0x10588, "M", "𐖯"), - (0x10589, "M", "𐖰"), - (0x1058A, "M", "𐖱"), - (0x1058B, "X"), - (0x1058C, "M", "𐖳"), - (0x1058D, "M", "𐖴"), - (0x1058E, "M", "𐖵"), - (0x1058F, "M", "𐖶"), - (0x10590, "M", "𐖷"), - (0x10591, "M", "𐖸"), - (0x10592, "M", "𐖹"), - (0x10593, "X"), - (0x10594, "M", "𐖻"), - (0x10595, "M", "𐖼"), - (0x10596, "X"), - (0x10597, "V"), - (0x105A2, "X"), - (0x105A3, "V"), - (0x105B2, "X"), - (0x105B3, "V"), - (0x105BA, "X"), - (0x105BB, "V"), - (0x105BD, "X"), - (0x10600, "V"), - (0x10737, "X"), - (0x10740, "V"), - (0x10756, "X"), - (0x10760, "V"), - (0x10768, "X"), - (0x10780, "V"), - (0x10781, "M", "ː"), - (0x10782, "M", "ˑ"), - (0x10783, "M", "æ"), - (0x10784, "M", "ʙ"), - (0x10785, "M", "ɓ"), - (0x10786, "X"), - (0x10787, "M", "ʣ"), - (0x10788, "M", "ꭦ"), - (0x10789, "M", "ʥ"), - (0x1078A, "M", "ʤ"), - (0x1078B, "M", "ɖ"), - (0x1078C, "M", "ɗ"), - ] - - -def _seg_55() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1078D, "M", "ᶑ"), - (0x1078E, "M", "ɘ"), - (0x1078F, "M", "ɞ"), - (0x10790, "M", "ʩ"), - (0x10791, "M", "ɤ"), - (0x10792, "M", "ɢ"), - (0x10793, "M", "ɠ"), - (0x10794, "M", "ʛ"), - (0x10795, "M", "ħ"), - (0x10796, "M", "ʜ"), - (0x10797, "M", "ɧ"), - (0x10798, "M", "ʄ"), - (0x10799, "M", "ʪ"), - (0x1079A, "M", "ʫ"), - (0x1079B, "M", "ɬ"), - (0x1079C, "M", "𝼄"), - (0x1079D, "M", "ꞎ"), - (0x1079E, "M", "ɮ"), - (0x1079F, "M", "𝼅"), - (0x107A0, "M", "ʎ"), - (0x107A1, "M", "𝼆"), - (0x107A2, "M", 
"ø"), - (0x107A3, "M", "ɶ"), - (0x107A4, "M", "ɷ"), - (0x107A5, "M", "q"), - (0x107A6, "M", "ɺ"), - (0x107A7, "M", "𝼈"), - (0x107A8, "M", "ɽ"), - (0x107A9, "M", "ɾ"), - (0x107AA, "M", "ʀ"), - (0x107AB, "M", "ʨ"), - (0x107AC, "M", "ʦ"), - (0x107AD, "M", "ꭧ"), - (0x107AE, "M", "ʧ"), - (0x107AF, "M", "ʈ"), - (0x107B0, "M", "ⱱ"), - (0x107B1, "X"), - (0x107B2, "M", "ʏ"), - (0x107B3, "M", "ʡ"), - (0x107B4, "M", "ʢ"), - (0x107B5, "M", "ʘ"), - (0x107B6, "M", "ǀ"), - (0x107B7, "M", "ǁ"), - (0x107B8, "M", "ǂ"), - (0x107B9, "M", "𝼊"), - (0x107BA, "M", "𝼞"), - (0x107BB, "X"), - (0x10800, "V"), - (0x10806, "X"), - (0x10808, "V"), - (0x10809, "X"), - (0x1080A, "V"), - (0x10836, "X"), - (0x10837, "V"), - (0x10839, "X"), - (0x1083C, "V"), - (0x1083D, "X"), - (0x1083F, "V"), - (0x10856, "X"), - (0x10857, "V"), - (0x1089F, "X"), - (0x108A7, "V"), - (0x108B0, "X"), - (0x108E0, "V"), - (0x108F3, "X"), - (0x108F4, "V"), - (0x108F6, "X"), - (0x108FB, "V"), - (0x1091C, "X"), - (0x1091F, "V"), - (0x1093A, "X"), - (0x1093F, "V"), - (0x10940, "X"), - (0x10980, "V"), - (0x109B8, "X"), - (0x109BC, "V"), - (0x109D0, "X"), - (0x109D2, "V"), - (0x10A04, "X"), - (0x10A05, "V"), - (0x10A07, "X"), - (0x10A0C, "V"), - (0x10A14, "X"), - (0x10A15, "V"), - (0x10A18, "X"), - (0x10A19, "V"), - (0x10A36, "X"), - (0x10A38, "V"), - (0x10A3B, "X"), - (0x10A3F, "V"), - (0x10A49, "X"), - (0x10A50, "V"), - (0x10A59, "X"), - (0x10A60, "V"), - (0x10AA0, "X"), - (0x10AC0, "V"), - (0x10AE7, "X"), - (0x10AEB, "V"), - (0x10AF7, "X"), - (0x10B00, "V"), - ] - - -def _seg_56() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x10B36, "X"), - (0x10B39, "V"), - (0x10B56, "X"), - (0x10B58, "V"), - (0x10B73, "X"), - (0x10B78, "V"), - (0x10B92, "X"), - (0x10B99, "V"), - (0x10B9D, "X"), - (0x10BA9, "V"), - (0x10BB0, "X"), - (0x10C00, "V"), - (0x10C49, "X"), - (0x10C80, "M", "𐳀"), - (0x10C81, "M", "𐳁"), - (0x10C82, "M", "𐳂"), - (0x10C83, "M", "𐳃"), - (0x10C84, "M", "𐳄"), - (0x10C85, "M", "𐳅"), - (0x10C86, "M", "𐳆"), - (0x10C87, "M", "𐳇"), - (0x10C88, "M", "𐳈"), - (0x10C89, "M", "𐳉"), - (0x10C8A, "M", "𐳊"), - (0x10C8B, "M", "𐳋"), - (0x10C8C, "M", "𐳌"), - (0x10C8D, "M", "𐳍"), - (0x10C8E, "M", "𐳎"), - (0x10C8F, "M", "𐳏"), - (0x10C90, "M", "𐳐"), - (0x10C91, "M", "𐳑"), - (0x10C92, "M", "𐳒"), - (0x10C93, "M", "𐳓"), - (0x10C94, "M", "𐳔"), - (0x10C95, "M", "𐳕"), - (0x10C96, "M", "𐳖"), - (0x10C97, "M", "𐳗"), - (0x10C98, "M", "𐳘"), - (0x10C99, "M", "𐳙"), - (0x10C9A, "M", "𐳚"), - (0x10C9B, "M", "𐳛"), - (0x10C9C, "M", "𐳜"), - (0x10C9D, "M", "𐳝"), - (0x10C9E, "M", "𐳞"), - (0x10C9F, "M", "𐳟"), - (0x10CA0, "M", "𐳠"), - (0x10CA1, "M", "𐳡"), - (0x10CA2, "M", "𐳢"), - (0x10CA3, "M", "𐳣"), - (0x10CA4, "M", "𐳤"), - (0x10CA5, "M", "𐳥"), - (0x10CA6, "M", "𐳦"), - (0x10CA7, "M", "𐳧"), - (0x10CA8, "M", "𐳨"), - (0x10CA9, "M", "𐳩"), - (0x10CAA, "M", "𐳪"), - (0x10CAB, "M", "𐳫"), - (0x10CAC, "M", "𐳬"), - (0x10CAD, "M", "𐳭"), - (0x10CAE, "M", "𐳮"), - (0x10CAF, "M", "𐳯"), - (0x10CB0, "M", "𐳰"), - (0x10CB1, "M", "𐳱"), - (0x10CB2, "M", "𐳲"), - (0x10CB3, "X"), - (0x10CC0, "V"), - (0x10CF3, "X"), - (0x10CFA, "V"), - (0x10D28, "X"), - (0x10D30, "V"), - (0x10D3A, "X"), - (0x10E60, "V"), - (0x10E7F, "X"), - (0x10E80, "V"), - (0x10EAA, "X"), - (0x10EAB, "V"), - (0x10EAE, "X"), - (0x10EB0, "V"), - (0x10EB2, "X"), - (0x10EFD, "V"), - (0x10F28, "X"), - (0x10F30, "V"), - (0x10F5A, "X"), - (0x10F70, "V"), - (0x10F8A, "X"), - (0x10FB0, "V"), - (0x10FCC, "X"), - (0x10FE0, "V"), - (0x10FF7, "X"), - (0x11000, "V"), - (0x1104E, "X"), - (0x11052, "V"), - (0x11076, "X"), - (0x1107F, 
"V"), - (0x110BD, "X"), - (0x110BE, "V"), - (0x110C3, "X"), - (0x110D0, "V"), - (0x110E9, "X"), - (0x110F0, "V"), - ] - - -def _seg_57() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x110FA, "X"), - (0x11100, "V"), - (0x11135, "X"), - (0x11136, "V"), - (0x11148, "X"), - (0x11150, "V"), - (0x11177, "X"), - (0x11180, "V"), - (0x111E0, "X"), - (0x111E1, "V"), - (0x111F5, "X"), - (0x11200, "V"), - (0x11212, "X"), - (0x11213, "V"), - (0x11242, "X"), - (0x11280, "V"), - (0x11287, "X"), - (0x11288, "V"), - (0x11289, "X"), - (0x1128A, "V"), - (0x1128E, "X"), - (0x1128F, "V"), - (0x1129E, "X"), - (0x1129F, "V"), - (0x112AA, "X"), - (0x112B0, "V"), - (0x112EB, "X"), - (0x112F0, "V"), - (0x112FA, "X"), - (0x11300, "V"), - (0x11304, "X"), - (0x11305, "V"), - (0x1130D, "X"), - (0x1130F, "V"), - (0x11311, "X"), - (0x11313, "V"), - (0x11329, "X"), - (0x1132A, "V"), - (0x11331, "X"), - (0x11332, "V"), - (0x11334, "X"), - (0x11335, "V"), - (0x1133A, "X"), - (0x1133B, "V"), - (0x11345, "X"), - (0x11347, "V"), - (0x11349, "X"), - (0x1134B, "V"), - (0x1134E, "X"), - (0x11350, "V"), - (0x11351, "X"), - (0x11357, "V"), - (0x11358, "X"), - (0x1135D, "V"), - (0x11364, "X"), - (0x11366, "V"), - (0x1136D, "X"), - (0x11370, "V"), - (0x11375, "X"), - (0x11400, "V"), - (0x1145C, "X"), - (0x1145D, "V"), - (0x11462, "X"), - (0x11480, "V"), - (0x114C8, "X"), - (0x114D0, "V"), - (0x114DA, "X"), - (0x11580, "V"), - (0x115B6, "X"), - (0x115B8, "V"), - (0x115DE, "X"), - (0x11600, "V"), - (0x11645, "X"), - (0x11650, "V"), - (0x1165A, "X"), - (0x11660, "V"), - (0x1166D, "X"), - (0x11680, "V"), - (0x116BA, "X"), - (0x116C0, "V"), - (0x116CA, "X"), - (0x11700, "V"), - (0x1171B, "X"), - (0x1171D, "V"), - (0x1172C, "X"), - (0x11730, "V"), - (0x11747, "X"), - (0x11800, "V"), - (0x1183C, "X"), - (0x118A0, "M", "𑣀"), - (0x118A1, "M", "𑣁"), - (0x118A2, "M", "𑣂"), - (0x118A3, "M", "𑣃"), - (0x118A4, "M", "𑣄"), - (0x118A5, "M", "𑣅"), - (0x118A6, "M", "𑣆"), - (0x118A7, "M", "𑣇"), - (0x118A8, "M", "𑣈"), - (0x118A9, "M", "𑣉"), - (0x118AA, "M", "𑣊"), - ] - - -def _seg_58() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x118AB, "M", "𑣋"), - (0x118AC, "M", "𑣌"), - (0x118AD, "M", "𑣍"), - (0x118AE, "M", "𑣎"), - (0x118AF, "M", "𑣏"), - (0x118B0, "M", "𑣐"), - (0x118B1, "M", "𑣑"), - (0x118B2, "M", "𑣒"), - (0x118B3, "M", "𑣓"), - (0x118B4, "M", "𑣔"), - (0x118B5, "M", "𑣕"), - (0x118B6, "M", "𑣖"), - (0x118B7, "M", "𑣗"), - (0x118B8, "M", "𑣘"), - (0x118B9, "M", "𑣙"), - (0x118BA, "M", "𑣚"), - (0x118BB, "M", "𑣛"), - (0x118BC, "M", "𑣜"), - (0x118BD, "M", "𑣝"), - (0x118BE, "M", "𑣞"), - (0x118BF, "M", "𑣟"), - (0x118C0, "V"), - (0x118F3, "X"), - (0x118FF, "V"), - (0x11907, "X"), - (0x11909, "V"), - (0x1190A, "X"), - (0x1190C, "V"), - (0x11914, "X"), - (0x11915, "V"), - (0x11917, "X"), - (0x11918, "V"), - (0x11936, "X"), - (0x11937, "V"), - (0x11939, "X"), - (0x1193B, "V"), - (0x11947, "X"), - (0x11950, "V"), - (0x1195A, "X"), - (0x119A0, "V"), - (0x119A8, "X"), - (0x119AA, "V"), - (0x119D8, "X"), - (0x119DA, "V"), - (0x119E5, "X"), - (0x11A00, "V"), - (0x11A48, "X"), - (0x11A50, "V"), - (0x11AA3, "X"), - (0x11AB0, "V"), - (0x11AF9, "X"), - (0x11B00, "V"), - (0x11B0A, "X"), - (0x11C00, "V"), - (0x11C09, "X"), - (0x11C0A, "V"), - (0x11C37, "X"), - (0x11C38, "V"), - (0x11C46, "X"), - (0x11C50, "V"), - (0x11C6D, "X"), - (0x11C70, "V"), - (0x11C90, "X"), - (0x11C92, "V"), - (0x11CA8, "X"), - (0x11CA9, "V"), - (0x11CB7, "X"), - (0x11D00, "V"), - (0x11D07, "X"), - (0x11D08, "V"), - (0x11D0A, "X"), - (0x11D0B, "V"), - 
(0x11D37, "X"), - (0x11D3A, "V"), - (0x11D3B, "X"), - (0x11D3C, "V"), - (0x11D3E, "X"), - (0x11D3F, "V"), - (0x11D48, "X"), - (0x11D50, "V"), - (0x11D5A, "X"), - (0x11D60, "V"), - (0x11D66, "X"), - (0x11D67, "V"), - (0x11D69, "X"), - (0x11D6A, "V"), - (0x11D8F, "X"), - (0x11D90, "V"), - (0x11D92, "X"), - (0x11D93, "V"), - (0x11D99, "X"), - (0x11DA0, "V"), - (0x11DAA, "X"), - (0x11EE0, "V"), - (0x11EF9, "X"), - (0x11F00, "V"), - (0x11F11, "X"), - (0x11F12, "V"), - (0x11F3B, "X"), - (0x11F3E, "V"), - ] - - -def _seg_59() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x11F5A, "X"), - (0x11FB0, "V"), - (0x11FB1, "X"), - (0x11FC0, "V"), - (0x11FF2, "X"), - (0x11FFF, "V"), - (0x1239A, "X"), - (0x12400, "V"), - (0x1246F, "X"), - (0x12470, "V"), - (0x12475, "X"), - (0x12480, "V"), - (0x12544, "X"), - (0x12F90, "V"), - (0x12FF3, "X"), - (0x13000, "V"), - (0x13430, "X"), - (0x13440, "V"), - (0x13456, "X"), - (0x14400, "V"), - (0x14647, "X"), - (0x16800, "V"), - (0x16A39, "X"), - (0x16A40, "V"), - (0x16A5F, "X"), - (0x16A60, "V"), - (0x16A6A, "X"), - (0x16A6E, "V"), - (0x16ABF, "X"), - (0x16AC0, "V"), - (0x16ACA, "X"), - (0x16AD0, "V"), - (0x16AEE, "X"), - (0x16AF0, "V"), - (0x16AF6, "X"), - (0x16B00, "V"), - (0x16B46, "X"), - (0x16B50, "V"), - (0x16B5A, "X"), - (0x16B5B, "V"), - (0x16B62, "X"), - (0x16B63, "V"), - (0x16B78, "X"), - (0x16B7D, "V"), - (0x16B90, "X"), - (0x16E40, "M", "𖹠"), - (0x16E41, "M", "𖹡"), - (0x16E42, "M", "𖹢"), - (0x16E43, "M", "𖹣"), - (0x16E44, "M", "𖹤"), - (0x16E45, "M", "𖹥"), - (0x16E46, "M", "𖹦"), - (0x16E47, "M", "𖹧"), - (0x16E48, "M", "𖹨"), - (0x16E49, "M", "𖹩"), - (0x16E4A, "M", "𖹪"), - (0x16E4B, "M", "𖹫"), - (0x16E4C, "M", "𖹬"), - (0x16E4D, "M", "𖹭"), - (0x16E4E, "M", "𖹮"), - (0x16E4F, "M", "𖹯"), - (0x16E50, "M", "𖹰"), - (0x16E51, "M", "𖹱"), - (0x16E52, "M", "𖹲"), - (0x16E53, "M", "𖹳"), - (0x16E54, "M", "𖹴"), - (0x16E55, "M", "𖹵"), - (0x16E56, "M", "𖹶"), - (0x16E57, "M", "𖹷"), - (0x16E58, "M", "𖹸"), - (0x16E59, "M", "𖹹"), - (0x16E5A, "M", "𖹺"), - (0x16E5B, "M", "𖹻"), - (0x16E5C, "M", "𖹼"), - (0x16E5D, "M", "𖹽"), - (0x16E5E, "M", "𖹾"), - (0x16E5F, "M", "𖹿"), - (0x16E60, "V"), - (0x16E9B, "X"), - (0x16F00, "V"), - (0x16F4B, "X"), - (0x16F4F, "V"), - (0x16F88, "X"), - (0x16F8F, "V"), - (0x16FA0, "X"), - (0x16FE0, "V"), - (0x16FE5, "X"), - (0x16FF0, "V"), - (0x16FF2, "X"), - (0x17000, "V"), - (0x187F8, "X"), - (0x18800, "V"), - (0x18CD6, "X"), - (0x18D00, "V"), - (0x18D09, "X"), - (0x1AFF0, "V"), - (0x1AFF4, "X"), - (0x1AFF5, "V"), - (0x1AFFC, "X"), - (0x1AFFD, "V"), - ] - - -def _seg_60() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1AFFF, "X"), - (0x1B000, "V"), - (0x1B123, "X"), - (0x1B132, "V"), - (0x1B133, "X"), - (0x1B150, "V"), - (0x1B153, "X"), - (0x1B155, "V"), - (0x1B156, "X"), - (0x1B164, "V"), - (0x1B168, "X"), - (0x1B170, "V"), - (0x1B2FC, "X"), - (0x1BC00, "V"), - (0x1BC6B, "X"), - (0x1BC70, "V"), - (0x1BC7D, "X"), - (0x1BC80, "V"), - (0x1BC89, "X"), - (0x1BC90, "V"), - (0x1BC9A, "X"), - (0x1BC9C, "V"), - (0x1BCA0, "I"), - (0x1BCA4, "X"), - (0x1CF00, "V"), - (0x1CF2E, "X"), - (0x1CF30, "V"), - (0x1CF47, "X"), - (0x1CF50, "V"), - (0x1CFC4, "X"), - (0x1D000, "V"), - (0x1D0F6, "X"), - (0x1D100, "V"), - (0x1D127, "X"), - (0x1D129, "V"), - (0x1D15E, "M", "𝅗𝅥"), - (0x1D15F, "M", "𝅘𝅥"), - (0x1D160, "M", "𝅘𝅥𝅮"), - (0x1D161, "M", "𝅘𝅥𝅯"), - (0x1D162, "M", "𝅘𝅥𝅰"), - (0x1D163, "M", "𝅘𝅥𝅱"), - (0x1D164, "M", "𝅘𝅥𝅲"), - (0x1D165, "V"), - (0x1D173, "X"), - (0x1D17B, "V"), - (0x1D1BB, "M", "𝆹𝅥"), - (0x1D1BC, "M", "𝆺𝅥"), - 
(0x1D1BD, "M", "𝆹𝅥𝅮"), - (0x1D1BE, "M", "𝆺𝅥𝅮"), - (0x1D1BF, "M", "𝆹𝅥𝅯"), - (0x1D1C0, "M", "𝆺𝅥𝅯"), - (0x1D1C1, "V"), - (0x1D1EB, "X"), - (0x1D200, "V"), - (0x1D246, "X"), - (0x1D2C0, "V"), - (0x1D2D4, "X"), - (0x1D2E0, "V"), - (0x1D2F4, "X"), - (0x1D300, "V"), - (0x1D357, "X"), - (0x1D360, "V"), - (0x1D379, "X"), - (0x1D400, "M", "a"), - (0x1D401, "M", "b"), - (0x1D402, "M", "c"), - (0x1D403, "M", "d"), - (0x1D404, "M", "e"), - (0x1D405, "M", "f"), - (0x1D406, "M", "g"), - (0x1D407, "M", "h"), - (0x1D408, "M", "i"), - (0x1D409, "M", "j"), - (0x1D40A, "M", "k"), - (0x1D40B, "M", "l"), - (0x1D40C, "M", "m"), - (0x1D40D, "M", "n"), - (0x1D40E, "M", "o"), - (0x1D40F, "M", "p"), - (0x1D410, "M", "q"), - (0x1D411, "M", "r"), - (0x1D412, "M", "s"), - (0x1D413, "M", "t"), - (0x1D414, "M", "u"), - (0x1D415, "M", "v"), - (0x1D416, "M", "w"), - (0x1D417, "M", "x"), - (0x1D418, "M", "y"), - (0x1D419, "M", "z"), - (0x1D41A, "M", "a"), - (0x1D41B, "M", "b"), - (0x1D41C, "M", "c"), - (0x1D41D, "M", "d"), - (0x1D41E, "M", "e"), - (0x1D41F, "M", "f"), - (0x1D420, "M", "g"), - (0x1D421, "M", "h"), - (0x1D422, "M", "i"), - (0x1D423, "M", "j"), - (0x1D424, "M", "k"), - ] - - -def _seg_61() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D425, "M", "l"), - (0x1D426, "M", "m"), - (0x1D427, "M", "n"), - (0x1D428, "M", "o"), - (0x1D429, "M", "p"), - (0x1D42A, "M", "q"), - (0x1D42B, "M", "r"), - (0x1D42C, "M", "s"), - (0x1D42D, "M", "t"), - (0x1D42E, "M", "u"), - (0x1D42F, "M", "v"), - (0x1D430, "M", "w"), - (0x1D431, "M", "x"), - (0x1D432, "M", "y"), - (0x1D433, "M", "z"), - (0x1D434, "M", "a"), - (0x1D435, "M", "b"), - (0x1D436, "M", "c"), - (0x1D437, "M", "d"), - (0x1D438, "M", "e"), - (0x1D439, "M", "f"), - (0x1D43A, "M", "g"), - (0x1D43B, "M", "h"), - (0x1D43C, "M", "i"), - (0x1D43D, "M", "j"), - (0x1D43E, "M", "k"), - (0x1D43F, "M", "l"), - (0x1D440, "M", "m"), - (0x1D441, "M", "n"), - (0x1D442, "M", "o"), - (0x1D443, "M", "p"), - (0x1D444, "M", "q"), - (0x1D445, "M", "r"), - (0x1D446, "M", "s"), - (0x1D447, "M", "t"), - (0x1D448, "M", "u"), - (0x1D449, "M", "v"), - (0x1D44A, "M", "w"), - (0x1D44B, "M", "x"), - (0x1D44C, "M", "y"), - (0x1D44D, "M", "z"), - (0x1D44E, "M", "a"), - (0x1D44F, "M", "b"), - (0x1D450, "M", "c"), - (0x1D451, "M", "d"), - (0x1D452, "M", "e"), - (0x1D453, "M", "f"), - (0x1D454, "M", "g"), - (0x1D455, "X"), - (0x1D456, "M", "i"), - (0x1D457, "M", "j"), - (0x1D458, "M", "k"), - (0x1D459, "M", "l"), - (0x1D45A, "M", "m"), - (0x1D45B, "M", "n"), - (0x1D45C, "M", "o"), - (0x1D45D, "M", "p"), - (0x1D45E, "M", "q"), - (0x1D45F, "M", "r"), - (0x1D460, "M", "s"), - (0x1D461, "M", "t"), - (0x1D462, "M", "u"), - (0x1D463, "M", "v"), - (0x1D464, "M", "w"), - (0x1D465, "M", "x"), - (0x1D466, "M", "y"), - (0x1D467, "M", "z"), - (0x1D468, "M", "a"), - (0x1D469, "M", "b"), - (0x1D46A, "M", "c"), - (0x1D46B, "M", "d"), - (0x1D46C, "M", "e"), - (0x1D46D, "M", "f"), - (0x1D46E, "M", "g"), - (0x1D46F, "M", "h"), - (0x1D470, "M", "i"), - (0x1D471, "M", "j"), - (0x1D472, "M", "k"), - (0x1D473, "M", "l"), - (0x1D474, "M", "m"), - (0x1D475, "M", "n"), - (0x1D476, "M", "o"), - (0x1D477, "M", "p"), - (0x1D478, "M", "q"), - (0x1D479, "M", "r"), - (0x1D47A, "M", "s"), - (0x1D47B, "M", "t"), - (0x1D47C, "M", "u"), - (0x1D47D, "M", "v"), - (0x1D47E, "M", "w"), - (0x1D47F, "M", "x"), - (0x1D480, "M", "y"), - (0x1D481, "M", "z"), - (0x1D482, "M", "a"), - (0x1D483, "M", "b"), - (0x1D484, "M", "c"), - (0x1D485, "M", "d"), - (0x1D486, "M", "e"), - (0x1D487, "M", "f"), - (0x1D488, "M", "g"), - ] - 
- -def _seg_62() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D489, "M", "h"), - (0x1D48A, "M", "i"), - (0x1D48B, "M", "j"), - (0x1D48C, "M", "k"), - (0x1D48D, "M", "l"), - (0x1D48E, "M", "m"), - (0x1D48F, "M", "n"), - (0x1D490, "M", "o"), - (0x1D491, "M", "p"), - (0x1D492, "M", "q"), - (0x1D493, "M", "r"), - (0x1D494, "M", "s"), - (0x1D495, "M", "t"), - (0x1D496, "M", "u"), - (0x1D497, "M", "v"), - (0x1D498, "M", "w"), - (0x1D499, "M", "x"), - (0x1D49A, "M", "y"), - (0x1D49B, "M", "z"), - (0x1D49C, "M", "a"), - (0x1D49D, "X"), - (0x1D49E, "M", "c"), - (0x1D49F, "M", "d"), - (0x1D4A0, "X"), - (0x1D4A2, "M", "g"), - (0x1D4A3, "X"), - (0x1D4A5, "M", "j"), - (0x1D4A6, "M", "k"), - (0x1D4A7, "X"), - (0x1D4A9, "M", "n"), - (0x1D4AA, "M", "o"), - (0x1D4AB, "M", "p"), - (0x1D4AC, "M", "q"), - (0x1D4AD, "X"), - (0x1D4AE, "M", "s"), - (0x1D4AF, "M", "t"), - (0x1D4B0, "M", "u"), - (0x1D4B1, "M", "v"), - (0x1D4B2, "M", "w"), - (0x1D4B3, "M", "x"), - (0x1D4B4, "M", "y"), - (0x1D4B5, "M", "z"), - (0x1D4B6, "M", "a"), - (0x1D4B7, "M", "b"), - (0x1D4B8, "M", "c"), - (0x1D4B9, "M", "d"), - (0x1D4BA, "X"), - (0x1D4BB, "M", "f"), - (0x1D4BC, "X"), - (0x1D4BD, "M", "h"), - (0x1D4BE, "M", "i"), - (0x1D4BF, "M", "j"), - (0x1D4C0, "M", "k"), - (0x1D4C1, "M", "l"), - (0x1D4C2, "M", "m"), - (0x1D4C3, "M", "n"), - (0x1D4C4, "X"), - (0x1D4C5, "M", "p"), - (0x1D4C6, "M", "q"), - (0x1D4C7, "M", "r"), - (0x1D4C8, "M", "s"), - (0x1D4C9, "M", "t"), - (0x1D4CA, "M", "u"), - (0x1D4CB, "M", "v"), - (0x1D4CC, "M", "w"), - (0x1D4CD, "M", "x"), - (0x1D4CE, "M", "y"), - (0x1D4CF, "M", "z"), - (0x1D4D0, "M", "a"), - (0x1D4D1, "M", "b"), - (0x1D4D2, "M", "c"), - (0x1D4D3, "M", "d"), - (0x1D4D4, "M", "e"), - (0x1D4D5, "M", "f"), - (0x1D4D6, "M", "g"), - (0x1D4D7, "M", "h"), - (0x1D4D8, "M", "i"), - (0x1D4D9, "M", "j"), - (0x1D4DA, "M", "k"), - (0x1D4DB, "M", "l"), - (0x1D4DC, "M", "m"), - (0x1D4DD, "M", "n"), - (0x1D4DE, "M", "o"), - (0x1D4DF, "M", "p"), - (0x1D4E0, "M", "q"), - (0x1D4E1, "M", "r"), - (0x1D4E2, "M", "s"), - (0x1D4E3, "M", "t"), - (0x1D4E4, "M", "u"), - (0x1D4E5, "M", "v"), - (0x1D4E6, "M", "w"), - (0x1D4E7, "M", "x"), - (0x1D4E8, "M", "y"), - (0x1D4E9, "M", "z"), - (0x1D4EA, "M", "a"), - (0x1D4EB, "M", "b"), - (0x1D4EC, "M", "c"), - (0x1D4ED, "M", "d"), - (0x1D4EE, "M", "e"), - (0x1D4EF, "M", "f"), - ] - - -def _seg_63() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D4F0, "M", "g"), - (0x1D4F1, "M", "h"), - (0x1D4F2, "M", "i"), - (0x1D4F3, "M", "j"), - (0x1D4F4, "M", "k"), - (0x1D4F5, "M", "l"), - (0x1D4F6, "M", "m"), - (0x1D4F7, "M", "n"), - (0x1D4F8, "M", "o"), - (0x1D4F9, "M", "p"), - (0x1D4FA, "M", "q"), - (0x1D4FB, "M", "r"), - (0x1D4FC, "M", "s"), - (0x1D4FD, "M", "t"), - (0x1D4FE, "M", "u"), - (0x1D4FF, "M", "v"), - (0x1D500, "M", "w"), - (0x1D501, "M", "x"), - (0x1D502, "M", "y"), - (0x1D503, "M", "z"), - (0x1D504, "M", "a"), - (0x1D505, "M", "b"), - (0x1D506, "X"), - (0x1D507, "M", "d"), - (0x1D508, "M", "e"), - (0x1D509, "M", "f"), - (0x1D50A, "M", "g"), - (0x1D50B, "X"), - (0x1D50D, "M", "j"), - (0x1D50E, "M", "k"), - (0x1D50F, "M", "l"), - (0x1D510, "M", "m"), - (0x1D511, "M", "n"), - (0x1D512, "M", "o"), - (0x1D513, "M", "p"), - (0x1D514, "M", "q"), - (0x1D515, "X"), - (0x1D516, "M", "s"), - (0x1D517, "M", "t"), - (0x1D518, "M", "u"), - (0x1D519, "M", "v"), - (0x1D51A, "M", "w"), - (0x1D51B, "M", "x"), - (0x1D51C, "M", "y"), - (0x1D51D, "X"), - (0x1D51E, "M", "a"), - (0x1D51F, "M", "b"), - (0x1D520, "M", "c"), - (0x1D521, "M", "d"), - (0x1D522, 
"M", "e"), - (0x1D523, "M", "f"), - (0x1D524, "M", "g"), - (0x1D525, "M", "h"), - (0x1D526, "M", "i"), - (0x1D527, "M", "j"), - (0x1D528, "M", "k"), - (0x1D529, "M", "l"), - (0x1D52A, "M", "m"), - (0x1D52B, "M", "n"), - (0x1D52C, "M", "o"), - (0x1D52D, "M", "p"), - (0x1D52E, "M", "q"), - (0x1D52F, "M", "r"), - (0x1D530, "M", "s"), - (0x1D531, "M", "t"), - (0x1D532, "M", "u"), - (0x1D533, "M", "v"), - (0x1D534, "M", "w"), - (0x1D535, "M", "x"), - (0x1D536, "M", "y"), - (0x1D537, "M", "z"), - (0x1D538, "M", "a"), - (0x1D539, "M", "b"), - (0x1D53A, "X"), - (0x1D53B, "M", "d"), - (0x1D53C, "M", "e"), - (0x1D53D, "M", "f"), - (0x1D53E, "M", "g"), - (0x1D53F, "X"), - (0x1D540, "M", "i"), - (0x1D541, "M", "j"), - (0x1D542, "M", "k"), - (0x1D543, "M", "l"), - (0x1D544, "M", "m"), - (0x1D545, "X"), - (0x1D546, "M", "o"), - (0x1D547, "X"), - (0x1D54A, "M", "s"), - (0x1D54B, "M", "t"), - (0x1D54C, "M", "u"), - (0x1D54D, "M", "v"), - (0x1D54E, "M", "w"), - (0x1D54F, "M", "x"), - (0x1D550, "M", "y"), - (0x1D551, "X"), - (0x1D552, "M", "a"), - (0x1D553, "M", "b"), - (0x1D554, "M", "c"), - (0x1D555, "M", "d"), - (0x1D556, "M", "e"), - ] - - -def _seg_64() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D557, "M", "f"), - (0x1D558, "M", "g"), - (0x1D559, "M", "h"), - (0x1D55A, "M", "i"), - (0x1D55B, "M", "j"), - (0x1D55C, "M", "k"), - (0x1D55D, "M", "l"), - (0x1D55E, "M", "m"), - (0x1D55F, "M", "n"), - (0x1D560, "M", "o"), - (0x1D561, "M", "p"), - (0x1D562, "M", "q"), - (0x1D563, "M", "r"), - (0x1D564, "M", "s"), - (0x1D565, "M", "t"), - (0x1D566, "M", "u"), - (0x1D567, "M", "v"), - (0x1D568, "M", "w"), - (0x1D569, "M", "x"), - (0x1D56A, "M", "y"), - (0x1D56B, "M", "z"), - (0x1D56C, "M", "a"), - (0x1D56D, "M", "b"), - (0x1D56E, "M", "c"), - (0x1D56F, "M", "d"), - (0x1D570, "M", "e"), - (0x1D571, "M", "f"), - (0x1D572, "M", "g"), - (0x1D573, "M", "h"), - (0x1D574, "M", "i"), - (0x1D575, "M", "j"), - (0x1D576, "M", "k"), - (0x1D577, "M", "l"), - (0x1D578, "M", "m"), - (0x1D579, "M", "n"), - (0x1D57A, "M", "o"), - (0x1D57B, "M", "p"), - (0x1D57C, "M", "q"), - (0x1D57D, "M", "r"), - (0x1D57E, "M", "s"), - (0x1D57F, "M", "t"), - (0x1D580, "M", "u"), - (0x1D581, "M", "v"), - (0x1D582, "M", "w"), - (0x1D583, "M", "x"), - (0x1D584, "M", "y"), - (0x1D585, "M", "z"), - (0x1D586, "M", "a"), - (0x1D587, "M", "b"), - (0x1D588, "M", "c"), - (0x1D589, "M", "d"), - (0x1D58A, "M", "e"), - (0x1D58B, "M", "f"), - (0x1D58C, "M", "g"), - (0x1D58D, "M", "h"), - (0x1D58E, "M", "i"), - (0x1D58F, "M", "j"), - (0x1D590, "M", "k"), - (0x1D591, "M", "l"), - (0x1D592, "M", "m"), - (0x1D593, "M", "n"), - (0x1D594, "M", "o"), - (0x1D595, "M", "p"), - (0x1D596, "M", "q"), - (0x1D597, "M", "r"), - (0x1D598, "M", "s"), - (0x1D599, "M", "t"), - (0x1D59A, "M", "u"), - (0x1D59B, "M", "v"), - (0x1D59C, "M", "w"), - (0x1D59D, "M", "x"), - (0x1D59E, "M", "y"), - (0x1D59F, "M", "z"), - (0x1D5A0, "M", "a"), - (0x1D5A1, "M", "b"), - (0x1D5A2, "M", "c"), - (0x1D5A3, "M", "d"), - (0x1D5A4, "M", "e"), - (0x1D5A5, "M", "f"), - (0x1D5A6, "M", "g"), - (0x1D5A7, "M", "h"), - (0x1D5A8, "M", "i"), - (0x1D5A9, "M", "j"), - (0x1D5AA, "M", "k"), - (0x1D5AB, "M", "l"), - (0x1D5AC, "M", "m"), - (0x1D5AD, "M", "n"), - (0x1D5AE, "M", "o"), - (0x1D5AF, "M", "p"), - (0x1D5B0, "M", "q"), - (0x1D5B1, "M", "r"), - (0x1D5B2, "M", "s"), - (0x1D5B3, "M", "t"), - (0x1D5B4, "M", "u"), - (0x1D5B5, "M", "v"), - (0x1D5B6, "M", "w"), - (0x1D5B7, "M", "x"), - (0x1D5B8, "M", "y"), - (0x1D5B9, "M", "z"), - (0x1D5BA, "M", "a"), - ] - - -def _seg_65() -> 
List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D5BB, "M", "b"), - (0x1D5BC, "M", "c"), - (0x1D5BD, "M", "d"), - (0x1D5BE, "M", "e"), - (0x1D5BF, "M", "f"), - (0x1D5C0, "M", "g"), - (0x1D5C1, "M", "h"), - (0x1D5C2, "M", "i"), - (0x1D5C3, "M", "j"), - (0x1D5C4, "M", "k"), - (0x1D5C5, "M", "l"), - (0x1D5C6, "M", "m"), - (0x1D5C7, "M", "n"), - (0x1D5C8, "M", "o"), - (0x1D5C9, "M", "p"), - (0x1D5CA, "M", "q"), - (0x1D5CB, "M", "r"), - (0x1D5CC, "M", "s"), - (0x1D5CD, "M", "t"), - (0x1D5CE, "M", "u"), - (0x1D5CF, "M", "v"), - (0x1D5D0, "M", "w"), - (0x1D5D1, "M", "x"), - (0x1D5D2, "M", "y"), - (0x1D5D3, "M", "z"), - (0x1D5D4, "M", "a"), - (0x1D5D5, "M", "b"), - (0x1D5D6, "M", "c"), - (0x1D5D7, "M", "d"), - (0x1D5D8, "M", "e"), - (0x1D5D9, "M", "f"), - (0x1D5DA, "M", "g"), - (0x1D5DB, "M", "h"), - (0x1D5DC, "M", "i"), - (0x1D5DD, "M", "j"), - (0x1D5DE, "M", "k"), - (0x1D5DF, "M", "l"), - (0x1D5E0, "M", "m"), - (0x1D5E1, "M", "n"), - (0x1D5E2, "M", "o"), - (0x1D5E3, "M", "p"), - (0x1D5E4, "M", "q"), - (0x1D5E5, "M", "r"), - (0x1D5E6, "M", "s"), - (0x1D5E7, "M", "t"), - (0x1D5E8, "M", "u"), - (0x1D5E9, "M", "v"), - (0x1D5EA, "M", "w"), - (0x1D5EB, "M", "x"), - (0x1D5EC, "M", "y"), - (0x1D5ED, "M", "z"), - (0x1D5EE, "M", "a"), - (0x1D5EF, "M", "b"), - (0x1D5F0, "M", "c"), - (0x1D5F1, "M", "d"), - (0x1D5F2, "M", "e"), - (0x1D5F3, "M", "f"), - (0x1D5F4, "M", "g"), - (0x1D5F5, "M", "h"), - (0x1D5F6, "M", "i"), - (0x1D5F7, "M", "j"), - (0x1D5F8, "M", "k"), - (0x1D5F9, "M", "l"), - (0x1D5FA, "M", "m"), - (0x1D5FB, "M", "n"), - (0x1D5FC, "M", "o"), - (0x1D5FD, "M", "p"), - (0x1D5FE, "M", "q"), - (0x1D5FF, "M", "r"), - (0x1D600, "M", "s"), - (0x1D601, "M", "t"), - (0x1D602, "M", "u"), - (0x1D603, "M", "v"), - (0x1D604, "M", "w"), - (0x1D605, "M", "x"), - (0x1D606, "M", "y"), - (0x1D607, "M", "z"), - (0x1D608, "M", "a"), - (0x1D609, "M", "b"), - (0x1D60A, "M", "c"), - (0x1D60B, "M", "d"), - (0x1D60C, "M", "e"), - (0x1D60D, "M", "f"), - (0x1D60E, "M", "g"), - (0x1D60F, "M", "h"), - (0x1D610, "M", "i"), - (0x1D611, "M", "j"), - (0x1D612, "M", "k"), - (0x1D613, "M", "l"), - (0x1D614, "M", "m"), - (0x1D615, "M", "n"), - (0x1D616, "M", "o"), - (0x1D617, "M", "p"), - (0x1D618, "M", "q"), - (0x1D619, "M", "r"), - (0x1D61A, "M", "s"), - (0x1D61B, "M", "t"), - (0x1D61C, "M", "u"), - (0x1D61D, "M", "v"), - (0x1D61E, "M", "w"), - ] - - -def _seg_66() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D61F, "M", "x"), - (0x1D620, "M", "y"), - (0x1D621, "M", "z"), - (0x1D622, "M", "a"), - (0x1D623, "M", "b"), - (0x1D624, "M", "c"), - (0x1D625, "M", "d"), - (0x1D626, "M", "e"), - (0x1D627, "M", "f"), - (0x1D628, "M", "g"), - (0x1D629, "M", "h"), - (0x1D62A, "M", "i"), - (0x1D62B, "M", "j"), - (0x1D62C, "M", "k"), - (0x1D62D, "M", "l"), - (0x1D62E, "M", "m"), - (0x1D62F, "M", "n"), - (0x1D630, "M", "o"), - (0x1D631, "M", "p"), - (0x1D632, "M", "q"), - (0x1D633, "M", "r"), - (0x1D634, "M", "s"), - (0x1D635, "M", "t"), - (0x1D636, "M", "u"), - (0x1D637, "M", "v"), - (0x1D638, "M", "w"), - (0x1D639, "M", "x"), - (0x1D63A, "M", "y"), - (0x1D63B, "M", "z"), - (0x1D63C, "M", "a"), - (0x1D63D, "M", "b"), - (0x1D63E, "M", "c"), - (0x1D63F, "M", "d"), - (0x1D640, "M", "e"), - (0x1D641, "M", "f"), - (0x1D642, "M", "g"), - (0x1D643, "M", "h"), - (0x1D644, "M", "i"), - (0x1D645, "M", "j"), - (0x1D646, "M", "k"), - (0x1D647, "M", "l"), - (0x1D648, "M", "m"), - (0x1D649, "M", "n"), - (0x1D64A, "M", "o"), - (0x1D64B, "M", "p"), - (0x1D64C, "M", "q"), - (0x1D64D, "M", "r"), - (0x1D64E, "M", 
"s"), - (0x1D64F, "M", "t"), - (0x1D650, "M", "u"), - (0x1D651, "M", "v"), - (0x1D652, "M", "w"), - (0x1D653, "M", "x"), - (0x1D654, "M", "y"), - (0x1D655, "M", "z"), - (0x1D656, "M", "a"), - (0x1D657, "M", "b"), - (0x1D658, "M", "c"), - (0x1D659, "M", "d"), - (0x1D65A, "M", "e"), - (0x1D65B, "M", "f"), - (0x1D65C, "M", "g"), - (0x1D65D, "M", "h"), - (0x1D65E, "M", "i"), - (0x1D65F, "M", "j"), - (0x1D660, "M", "k"), - (0x1D661, "M", "l"), - (0x1D662, "M", "m"), - (0x1D663, "M", "n"), - (0x1D664, "M", "o"), - (0x1D665, "M", "p"), - (0x1D666, "M", "q"), - (0x1D667, "M", "r"), - (0x1D668, "M", "s"), - (0x1D669, "M", "t"), - (0x1D66A, "M", "u"), - (0x1D66B, "M", "v"), - (0x1D66C, "M", "w"), - (0x1D66D, "M", "x"), - (0x1D66E, "M", "y"), - (0x1D66F, "M", "z"), - (0x1D670, "M", "a"), - (0x1D671, "M", "b"), - (0x1D672, "M", "c"), - (0x1D673, "M", "d"), - (0x1D674, "M", "e"), - (0x1D675, "M", "f"), - (0x1D676, "M", "g"), - (0x1D677, "M", "h"), - (0x1D678, "M", "i"), - (0x1D679, "M", "j"), - (0x1D67A, "M", "k"), - (0x1D67B, "M", "l"), - (0x1D67C, "M", "m"), - (0x1D67D, "M", "n"), - (0x1D67E, "M", "o"), - (0x1D67F, "M", "p"), - (0x1D680, "M", "q"), - (0x1D681, "M", "r"), - (0x1D682, "M", "s"), - ] - - -def _seg_67() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D683, "M", "t"), - (0x1D684, "M", "u"), - (0x1D685, "M", "v"), - (0x1D686, "M", "w"), - (0x1D687, "M", "x"), - (0x1D688, "M", "y"), - (0x1D689, "M", "z"), - (0x1D68A, "M", "a"), - (0x1D68B, "M", "b"), - (0x1D68C, "M", "c"), - (0x1D68D, "M", "d"), - (0x1D68E, "M", "e"), - (0x1D68F, "M", "f"), - (0x1D690, "M", "g"), - (0x1D691, "M", "h"), - (0x1D692, "M", "i"), - (0x1D693, "M", "j"), - (0x1D694, "M", "k"), - (0x1D695, "M", "l"), - (0x1D696, "M", "m"), - (0x1D697, "M", "n"), - (0x1D698, "M", "o"), - (0x1D699, "M", "p"), - (0x1D69A, "M", "q"), - (0x1D69B, "M", "r"), - (0x1D69C, "M", "s"), - (0x1D69D, "M", "t"), - (0x1D69E, "M", "u"), - (0x1D69F, "M", "v"), - (0x1D6A0, "M", "w"), - (0x1D6A1, "M", "x"), - (0x1D6A2, "M", "y"), - (0x1D6A3, "M", "z"), - (0x1D6A4, "M", "ı"), - (0x1D6A5, "M", "ȷ"), - (0x1D6A6, "X"), - (0x1D6A8, "M", "α"), - (0x1D6A9, "M", "β"), - (0x1D6AA, "M", "γ"), - (0x1D6AB, "M", "δ"), - (0x1D6AC, "M", "ε"), - (0x1D6AD, "M", "ζ"), - (0x1D6AE, "M", "η"), - (0x1D6AF, "M", "θ"), - (0x1D6B0, "M", "ι"), - (0x1D6B1, "M", "κ"), - (0x1D6B2, "M", "λ"), - (0x1D6B3, "M", "μ"), - (0x1D6B4, "M", "ν"), - (0x1D6B5, "M", "ξ"), - (0x1D6B6, "M", "ο"), - (0x1D6B7, "M", "π"), - (0x1D6B8, "M", "ρ"), - (0x1D6B9, "M", "θ"), - (0x1D6BA, "M", "σ"), - (0x1D6BB, "M", "τ"), - (0x1D6BC, "M", "υ"), - (0x1D6BD, "M", "φ"), - (0x1D6BE, "M", "χ"), - (0x1D6BF, "M", "ψ"), - (0x1D6C0, "M", "ω"), - (0x1D6C1, "M", "∇"), - (0x1D6C2, "M", "α"), - (0x1D6C3, "M", "β"), - (0x1D6C4, "M", "γ"), - (0x1D6C5, "M", "δ"), - (0x1D6C6, "M", "ε"), - (0x1D6C7, "M", "ζ"), - (0x1D6C8, "M", "η"), - (0x1D6C9, "M", "θ"), - (0x1D6CA, "M", "ι"), - (0x1D6CB, "M", "κ"), - (0x1D6CC, "M", "λ"), - (0x1D6CD, "M", "μ"), - (0x1D6CE, "M", "ν"), - (0x1D6CF, "M", "ξ"), - (0x1D6D0, "M", "ο"), - (0x1D6D1, "M", "π"), - (0x1D6D2, "M", "ρ"), - (0x1D6D3, "M", "σ"), - (0x1D6D5, "M", "τ"), - (0x1D6D6, "M", "υ"), - (0x1D6D7, "M", "φ"), - (0x1D6D8, "M", "χ"), - (0x1D6D9, "M", "ψ"), - (0x1D6DA, "M", "ω"), - (0x1D6DB, "M", "∂"), - (0x1D6DC, "M", "ε"), - (0x1D6DD, "M", "θ"), - (0x1D6DE, "M", "κ"), - (0x1D6DF, "M", "φ"), - (0x1D6E0, "M", "ρ"), - (0x1D6E1, "M", "π"), - (0x1D6E2, "M", "α"), - (0x1D6E3, "M", "β"), - (0x1D6E4, "M", "γ"), - (0x1D6E5, "M", "δ"), - (0x1D6E6, "M", "ε"), - (0x1D6E7, 
"M", "ζ"), - (0x1D6E8, "M", "η"), - ] - - -def _seg_68() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D6E9, "M", "θ"), - (0x1D6EA, "M", "ι"), - (0x1D6EB, "M", "κ"), - (0x1D6EC, "M", "λ"), - (0x1D6ED, "M", "μ"), - (0x1D6EE, "M", "ν"), - (0x1D6EF, "M", "ξ"), - (0x1D6F0, "M", "ο"), - (0x1D6F1, "M", "π"), - (0x1D6F2, "M", "ρ"), - (0x1D6F3, "M", "θ"), - (0x1D6F4, "M", "σ"), - (0x1D6F5, "M", "τ"), - (0x1D6F6, "M", "υ"), - (0x1D6F7, "M", "φ"), - (0x1D6F8, "M", "χ"), - (0x1D6F9, "M", "ψ"), - (0x1D6FA, "M", "ω"), - (0x1D6FB, "M", "∇"), - (0x1D6FC, "M", "α"), - (0x1D6FD, "M", "β"), - (0x1D6FE, "M", "γ"), - (0x1D6FF, "M", "δ"), - (0x1D700, "M", "ε"), - (0x1D701, "M", "ζ"), - (0x1D702, "M", "η"), - (0x1D703, "M", "θ"), - (0x1D704, "M", "ι"), - (0x1D705, "M", "κ"), - (0x1D706, "M", "λ"), - (0x1D707, "M", "μ"), - (0x1D708, "M", "ν"), - (0x1D709, "M", "ξ"), - (0x1D70A, "M", "ο"), - (0x1D70B, "M", "π"), - (0x1D70C, "M", "ρ"), - (0x1D70D, "M", "σ"), - (0x1D70F, "M", "τ"), - (0x1D710, "M", "υ"), - (0x1D711, "M", "φ"), - (0x1D712, "M", "χ"), - (0x1D713, "M", "ψ"), - (0x1D714, "M", "ω"), - (0x1D715, "M", "∂"), - (0x1D716, "M", "ε"), - (0x1D717, "M", "θ"), - (0x1D718, "M", "κ"), - (0x1D719, "M", "φ"), - (0x1D71A, "M", "ρ"), - (0x1D71B, "M", "π"), - (0x1D71C, "M", "α"), - (0x1D71D, "M", "β"), - (0x1D71E, "M", "γ"), - (0x1D71F, "M", "δ"), - (0x1D720, "M", "ε"), - (0x1D721, "M", "ζ"), - (0x1D722, "M", "η"), - (0x1D723, "M", "θ"), - (0x1D724, "M", "ι"), - (0x1D725, "M", "κ"), - (0x1D726, "M", "λ"), - (0x1D727, "M", "μ"), - (0x1D728, "M", "ν"), - (0x1D729, "M", "ξ"), - (0x1D72A, "M", "ο"), - (0x1D72B, "M", "π"), - (0x1D72C, "M", "ρ"), - (0x1D72D, "M", "θ"), - (0x1D72E, "M", "σ"), - (0x1D72F, "M", "τ"), - (0x1D730, "M", "υ"), - (0x1D731, "M", "φ"), - (0x1D732, "M", "χ"), - (0x1D733, "M", "ψ"), - (0x1D734, "M", "ω"), - (0x1D735, "M", "∇"), - (0x1D736, "M", "α"), - (0x1D737, "M", "β"), - (0x1D738, "M", "γ"), - (0x1D739, "M", "δ"), - (0x1D73A, "M", "ε"), - (0x1D73B, "M", "ζ"), - (0x1D73C, "M", "η"), - (0x1D73D, "M", "θ"), - (0x1D73E, "M", "ι"), - (0x1D73F, "M", "κ"), - (0x1D740, "M", "λ"), - (0x1D741, "M", "μ"), - (0x1D742, "M", "ν"), - (0x1D743, "M", "ξ"), - (0x1D744, "M", "ο"), - (0x1D745, "M", "π"), - (0x1D746, "M", "ρ"), - (0x1D747, "M", "σ"), - (0x1D749, "M", "τ"), - (0x1D74A, "M", "υ"), - (0x1D74B, "M", "φ"), - (0x1D74C, "M", "χ"), - (0x1D74D, "M", "ψ"), - (0x1D74E, "M", "ω"), - ] - - -def _seg_69() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D74F, "M", "∂"), - (0x1D750, "M", "ε"), - (0x1D751, "M", "θ"), - (0x1D752, "M", "κ"), - (0x1D753, "M", "φ"), - (0x1D754, "M", "ρ"), - (0x1D755, "M", "π"), - (0x1D756, "M", "α"), - (0x1D757, "M", "β"), - (0x1D758, "M", "γ"), - (0x1D759, "M", "δ"), - (0x1D75A, "M", "ε"), - (0x1D75B, "M", "ζ"), - (0x1D75C, "M", "η"), - (0x1D75D, "M", "θ"), - (0x1D75E, "M", "ι"), - (0x1D75F, "M", "κ"), - (0x1D760, "M", "λ"), - (0x1D761, "M", "μ"), - (0x1D762, "M", "ν"), - (0x1D763, "M", "ξ"), - (0x1D764, "M", "ο"), - (0x1D765, "M", "π"), - (0x1D766, "M", "ρ"), - (0x1D767, "M", "θ"), - (0x1D768, "M", "σ"), - (0x1D769, "M", "τ"), - (0x1D76A, "M", "υ"), - (0x1D76B, "M", "φ"), - (0x1D76C, "M", "χ"), - (0x1D76D, "M", "ψ"), - (0x1D76E, "M", "ω"), - (0x1D76F, "M", "∇"), - (0x1D770, "M", "α"), - (0x1D771, "M", "β"), - (0x1D772, "M", "γ"), - (0x1D773, "M", "δ"), - (0x1D774, "M", "ε"), - (0x1D775, "M", "ζ"), - (0x1D776, "M", "η"), - (0x1D777, "M", "θ"), - (0x1D778, "M", "ι"), - (0x1D779, "M", "κ"), - (0x1D77A, "M", "λ"), - (0x1D77B, "M", "μ"), - 
(0x1D77C, "M", "ν"), - (0x1D77D, "M", "ξ"), - (0x1D77E, "M", "ο"), - (0x1D77F, "M", "π"), - (0x1D780, "M", "ρ"), - (0x1D781, "M", "σ"), - (0x1D783, "M", "τ"), - (0x1D784, "M", "υ"), - (0x1D785, "M", "φ"), - (0x1D786, "M", "χ"), - (0x1D787, "M", "ψ"), - (0x1D788, "M", "ω"), - (0x1D789, "M", "∂"), - (0x1D78A, "M", "ε"), - (0x1D78B, "M", "θ"), - (0x1D78C, "M", "κ"), - (0x1D78D, "M", "φ"), - (0x1D78E, "M", "ρ"), - (0x1D78F, "M", "π"), - (0x1D790, "M", "α"), - (0x1D791, "M", "β"), - (0x1D792, "M", "γ"), - (0x1D793, "M", "δ"), - (0x1D794, "M", "ε"), - (0x1D795, "M", "ζ"), - (0x1D796, "M", "η"), - (0x1D797, "M", "θ"), - (0x1D798, "M", "ι"), - (0x1D799, "M", "κ"), - (0x1D79A, "M", "λ"), - (0x1D79B, "M", "μ"), - (0x1D79C, "M", "ν"), - (0x1D79D, "M", "ξ"), - (0x1D79E, "M", "ο"), - (0x1D79F, "M", "π"), - (0x1D7A0, "M", "ρ"), - (0x1D7A1, "M", "θ"), - (0x1D7A2, "M", "σ"), - (0x1D7A3, "M", "τ"), - (0x1D7A4, "M", "υ"), - (0x1D7A5, "M", "φ"), - (0x1D7A6, "M", "χ"), - (0x1D7A7, "M", "ψ"), - (0x1D7A8, "M", "ω"), - (0x1D7A9, "M", "∇"), - (0x1D7AA, "M", "α"), - (0x1D7AB, "M", "β"), - (0x1D7AC, "M", "γ"), - (0x1D7AD, "M", "δ"), - (0x1D7AE, "M", "ε"), - (0x1D7AF, "M", "ζ"), - (0x1D7B0, "M", "η"), - (0x1D7B1, "M", "θ"), - (0x1D7B2, "M", "ι"), - (0x1D7B3, "M", "κ"), - ] - - -def _seg_70() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1D7B4, "M", "λ"), - (0x1D7B5, "M", "μ"), - (0x1D7B6, "M", "ν"), - (0x1D7B7, "M", "ξ"), - (0x1D7B8, "M", "ο"), - (0x1D7B9, "M", "π"), - (0x1D7BA, "M", "ρ"), - (0x1D7BB, "M", "σ"), - (0x1D7BD, "M", "τ"), - (0x1D7BE, "M", "υ"), - (0x1D7BF, "M", "φ"), - (0x1D7C0, "M", "χ"), - (0x1D7C1, "M", "ψ"), - (0x1D7C2, "M", "ω"), - (0x1D7C3, "M", "∂"), - (0x1D7C4, "M", "ε"), - (0x1D7C5, "M", "θ"), - (0x1D7C6, "M", "κ"), - (0x1D7C7, "M", "φ"), - (0x1D7C8, "M", "ρ"), - (0x1D7C9, "M", "π"), - (0x1D7CA, "M", "ϝ"), - (0x1D7CC, "X"), - (0x1D7CE, "M", "0"), - (0x1D7CF, "M", "1"), - (0x1D7D0, "M", "2"), - (0x1D7D1, "M", "3"), - (0x1D7D2, "M", "4"), - (0x1D7D3, "M", "5"), - (0x1D7D4, "M", "6"), - (0x1D7D5, "M", "7"), - (0x1D7D6, "M", "8"), - (0x1D7D7, "M", "9"), - (0x1D7D8, "M", "0"), - (0x1D7D9, "M", "1"), - (0x1D7DA, "M", "2"), - (0x1D7DB, "M", "3"), - (0x1D7DC, "M", "4"), - (0x1D7DD, "M", "5"), - (0x1D7DE, "M", "6"), - (0x1D7DF, "M", "7"), - (0x1D7E0, "M", "8"), - (0x1D7E1, "M", "9"), - (0x1D7E2, "M", "0"), - (0x1D7E3, "M", "1"), - (0x1D7E4, "M", "2"), - (0x1D7E5, "M", "3"), - (0x1D7E6, "M", "4"), - (0x1D7E7, "M", "5"), - (0x1D7E8, "M", "6"), - (0x1D7E9, "M", "7"), - (0x1D7EA, "M", "8"), - (0x1D7EB, "M", "9"), - (0x1D7EC, "M", "0"), - (0x1D7ED, "M", "1"), - (0x1D7EE, "M", "2"), - (0x1D7EF, "M", "3"), - (0x1D7F0, "M", "4"), - (0x1D7F1, "M", "5"), - (0x1D7F2, "M", "6"), - (0x1D7F3, "M", "7"), - (0x1D7F4, "M", "8"), - (0x1D7F5, "M", "9"), - (0x1D7F6, "M", "0"), - (0x1D7F7, "M", "1"), - (0x1D7F8, "M", "2"), - (0x1D7F9, "M", "3"), - (0x1D7FA, "M", "4"), - (0x1D7FB, "M", "5"), - (0x1D7FC, "M", "6"), - (0x1D7FD, "M", "7"), - (0x1D7FE, "M", "8"), - (0x1D7FF, "M", "9"), - (0x1D800, "V"), - (0x1DA8C, "X"), - (0x1DA9B, "V"), - (0x1DAA0, "X"), - (0x1DAA1, "V"), - (0x1DAB0, "X"), - (0x1DF00, "V"), - (0x1DF1F, "X"), - (0x1DF25, "V"), - (0x1DF2B, "X"), - (0x1E000, "V"), - (0x1E007, "X"), - (0x1E008, "V"), - (0x1E019, "X"), - (0x1E01B, "V"), - (0x1E022, "X"), - (0x1E023, "V"), - (0x1E025, "X"), - (0x1E026, "V"), - (0x1E02B, "X"), - (0x1E030, "M", "а"), - (0x1E031, "M", "б"), - (0x1E032, "M", "в"), - (0x1E033, "M", "г"), - (0x1E034, "M", "д"), - (0x1E035, "M", "е"), - (0x1E036, "M", "ж"), - ] - - 
-def _seg_71() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E037, "M", "з"), - (0x1E038, "M", "и"), - (0x1E039, "M", "к"), - (0x1E03A, "M", "л"), - (0x1E03B, "M", "м"), - (0x1E03C, "M", "о"), - (0x1E03D, "M", "п"), - (0x1E03E, "M", "р"), - (0x1E03F, "M", "с"), - (0x1E040, "M", "т"), - (0x1E041, "M", "у"), - (0x1E042, "M", "ф"), - (0x1E043, "M", "х"), - (0x1E044, "M", "ц"), - (0x1E045, "M", "ч"), - (0x1E046, "M", "ш"), - (0x1E047, "M", "ы"), - (0x1E048, "M", "э"), - (0x1E049, "M", "ю"), - (0x1E04A, "M", "ꚉ"), - (0x1E04B, "M", "ә"), - (0x1E04C, "M", "і"), - (0x1E04D, "M", "ј"), - (0x1E04E, "M", "ө"), - (0x1E04F, "M", "ү"), - (0x1E050, "M", "ӏ"), - (0x1E051, "M", "а"), - (0x1E052, "M", "б"), - (0x1E053, "M", "в"), - (0x1E054, "M", "г"), - (0x1E055, "M", "д"), - (0x1E056, "M", "е"), - (0x1E057, "M", "ж"), - (0x1E058, "M", "з"), - (0x1E059, "M", "и"), - (0x1E05A, "M", "к"), - (0x1E05B, "M", "л"), - (0x1E05C, "M", "о"), - (0x1E05D, "M", "п"), - (0x1E05E, "M", "с"), - (0x1E05F, "M", "у"), - (0x1E060, "M", "ф"), - (0x1E061, "M", "х"), - (0x1E062, "M", "ц"), - (0x1E063, "M", "ч"), - (0x1E064, "M", "ш"), - (0x1E065, "M", "ъ"), - (0x1E066, "M", "ы"), - (0x1E067, "M", "ґ"), - (0x1E068, "M", "і"), - (0x1E069, "M", "ѕ"), - (0x1E06A, "M", "џ"), - (0x1E06B, "M", "ҫ"), - (0x1E06C, "M", "ꙑ"), - (0x1E06D, "M", "ұ"), - (0x1E06E, "X"), - (0x1E08F, "V"), - (0x1E090, "X"), - (0x1E100, "V"), - (0x1E12D, "X"), - (0x1E130, "V"), - (0x1E13E, "X"), - (0x1E140, "V"), - (0x1E14A, "X"), - (0x1E14E, "V"), - (0x1E150, "X"), - (0x1E290, "V"), - (0x1E2AF, "X"), - (0x1E2C0, "V"), - (0x1E2FA, "X"), - (0x1E2FF, "V"), - (0x1E300, "X"), - (0x1E4D0, "V"), - (0x1E4FA, "X"), - (0x1E7E0, "V"), - (0x1E7E7, "X"), - (0x1E7E8, "V"), - (0x1E7EC, "X"), - (0x1E7ED, "V"), - (0x1E7EF, "X"), - (0x1E7F0, "V"), - (0x1E7FF, "X"), - (0x1E800, "V"), - (0x1E8C5, "X"), - (0x1E8C7, "V"), - (0x1E8D7, "X"), - (0x1E900, "M", "𞤢"), - (0x1E901, "M", "𞤣"), - (0x1E902, "M", "𞤤"), - (0x1E903, "M", "𞤥"), - (0x1E904, "M", "𞤦"), - (0x1E905, "M", "𞤧"), - (0x1E906, "M", "𞤨"), - (0x1E907, "M", "𞤩"), - (0x1E908, "M", "𞤪"), - (0x1E909, "M", "𞤫"), - (0x1E90A, "M", "𞤬"), - (0x1E90B, "M", "𞤭"), - (0x1E90C, "M", "𞤮"), - (0x1E90D, "M", "𞤯"), - ] - - -def _seg_72() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1E90E, "M", "𞤰"), - (0x1E90F, "M", "𞤱"), - (0x1E910, "M", "𞤲"), - (0x1E911, "M", "𞤳"), - (0x1E912, "M", "𞤴"), - (0x1E913, "M", "𞤵"), - (0x1E914, "M", "𞤶"), - (0x1E915, "M", "𞤷"), - (0x1E916, "M", "𞤸"), - (0x1E917, "M", "𞤹"), - (0x1E918, "M", "𞤺"), - (0x1E919, "M", "𞤻"), - (0x1E91A, "M", "𞤼"), - (0x1E91B, "M", "𞤽"), - (0x1E91C, "M", "𞤾"), - (0x1E91D, "M", "𞤿"), - (0x1E91E, "M", "𞥀"), - (0x1E91F, "M", "𞥁"), - (0x1E920, "M", "𞥂"), - (0x1E921, "M", "𞥃"), - (0x1E922, "V"), - (0x1E94C, "X"), - (0x1E950, "V"), - (0x1E95A, "X"), - (0x1E95E, "V"), - (0x1E960, "X"), - (0x1EC71, "V"), - (0x1ECB5, "X"), - (0x1ED01, "V"), - (0x1ED3E, "X"), - (0x1EE00, "M", "ا"), - (0x1EE01, "M", "ب"), - (0x1EE02, "M", "ج"), - (0x1EE03, "M", "د"), - (0x1EE04, "X"), - (0x1EE05, "M", "و"), - (0x1EE06, "M", "ز"), - (0x1EE07, "M", "ح"), - (0x1EE08, "M", "ط"), - (0x1EE09, "M", "ي"), - (0x1EE0A, "M", "ك"), - (0x1EE0B, "M", "ل"), - (0x1EE0C, "M", "م"), - (0x1EE0D, "M", "ن"), - (0x1EE0E, "M", "س"), - (0x1EE0F, "M", "ع"), - (0x1EE10, "M", "ف"), - (0x1EE11, "M", "ص"), - (0x1EE12, "M", "ق"), - (0x1EE13, "M", "ر"), - (0x1EE14, "M", "ش"), - (0x1EE15, "M", "ت"), - (0x1EE16, "M", "ث"), - (0x1EE17, "M", "خ"), - (0x1EE18, "M", "ذ"), - (0x1EE19, "M", "ض"), - 
(0x1EE1A, "M", "ظ"), - (0x1EE1B, "M", "غ"), - (0x1EE1C, "M", "ٮ"), - (0x1EE1D, "M", "ں"), - (0x1EE1E, "M", "ڡ"), - (0x1EE1F, "M", "ٯ"), - (0x1EE20, "X"), - (0x1EE21, "M", "ب"), - (0x1EE22, "M", "ج"), - (0x1EE23, "X"), - (0x1EE24, "M", "ه"), - (0x1EE25, "X"), - (0x1EE27, "M", "ح"), - (0x1EE28, "X"), - (0x1EE29, "M", "ي"), - (0x1EE2A, "M", "ك"), - (0x1EE2B, "M", "ل"), - (0x1EE2C, "M", "م"), - (0x1EE2D, "M", "ن"), - (0x1EE2E, "M", "س"), - (0x1EE2F, "M", "ع"), - (0x1EE30, "M", "ف"), - (0x1EE31, "M", "ص"), - (0x1EE32, "M", "ق"), - (0x1EE33, "X"), - (0x1EE34, "M", "ش"), - (0x1EE35, "M", "ت"), - (0x1EE36, "M", "ث"), - (0x1EE37, "M", "خ"), - (0x1EE38, "X"), - (0x1EE39, "M", "ض"), - (0x1EE3A, "X"), - (0x1EE3B, "M", "غ"), - (0x1EE3C, "X"), - (0x1EE42, "M", "ج"), - (0x1EE43, "X"), - (0x1EE47, "M", "ح"), - (0x1EE48, "X"), - (0x1EE49, "M", "ي"), - (0x1EE4A, "X"), - (0x1EE4B, "M", "ل"), - (0x1EE4C, "X"), - (0x1EE4D, "M", "ن"), - (0x1EE4E, "M", "س"), - ] - - -def _seg_73() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EE4F, "M", "ع"), - (0x1EE50, "X"), - (0x1EE51, "M", "ص"), - (0x1EE52, "M", "ق"), - (0x1EE53, "X"), - (0x1EE54, "M", "ش"), - (0x1EE55, "X"), - (0x1EE57, "M", "خ"), - (0x1EE58, "X"), - (0x1EE59, "M", "ض"), - (0x1EE5A, "X"), - (0x1EE5B, "M", "غ"), - (0x1EE5C, "X"), - (0x1EE5D, "M", "ں"), - (0x1EE5E, "X"), - (0x1EE5F, "M", "ٯ"), - (0x1EE60, "X"), - (0x1EE61, "M", "ب"), - (0x1EE62, "M", "ج"), - (0x1EE63, "X"), - (0x1EE64, "M", "ه"), - (0x1EE65, "X"), - (0x1EE67, "M", "ح"), - (0x1EE68, "M", "ط"), - (0x1EE69, "M", "ي"), - (0x1EE6A, "M", "ك"), - (0x1EE6B, "X"), - (0x1EE6C, "M", "م"), - (0x1EE6D, "M", "ن"), - (0x1EE6E, "M", "س"), - (0x1EE6F, "M", "ع"), - (0x1EE70, "M", "ف"), - (0x1EE71, "M", "ص"), - (0x1EE72, "M", "ق"), - (0x1EE73, "X"), - (0x1EE74, "M", "ش"), - (0x1EE75, "M", "ت"), - (0x1EE76, "M", "ث"), - (0x1EE77, "M", "خ"), - (0x1EE78, "X"), - (0x1EE79, "M", "ض"), - (0x1EE7A, "M", "ظ"), - (0x1EE7B, "M", "غ"), - (0x1EE7C, "M", "ٮ"), - (0x1EE7D, "X"), - (0x1EE7E, "M", "ڡ"), - (0x1EE7F, "X"), - (0x1EE80, "M", "ا"), - (0x1EE81, "M", "ب"), - (0x1EE82, "M", "ج"), - (0x1EE83, "M", "د"), - (0x1EE84, "M", "ه"), - (0x1EE85, "M", "و"), - (0x1EE86, "M", "ز"), - (0x1EE87, "M", "ح"), - (0x1EE88, "M", "ط"), - (0x1EE89, "M", "ي"), - (0x1EE8A, "X"), - (0x1EE8B, "M", "ل"), - (0x1EE8C, "M", "م"), - (0x1EE8D, "M", "ن"), - (0x1EE8E, "M", "س"), - (0x1EE8F, "M", "ع"), - (0x1EE90, "M", "ف"), - (0x1EE91, "M", "ص"), - (0x1EE92, "M", "ق"), - (0x1EE93, "M", "ر"), - (0x1EE94, "M", "ش"), - (0x1EE95, "M", "ت"), - (0x1EE96, "M", "ث"), - (0x1EE97, "M", "خ"), - (0x1EE98, "M", "ذ"), - (0x1EE99, "M", "ض"), - (0x1EE9A, "M", "ظ"), - (0x1EE9B, "M", "غ"), - (0x1EE9C, "X"), - (0x1EEA1, "M", "ب"), - (0x1EEA2, "M", "ج"), - (0x1EEA3, "M", "د"), - (0x1EEA4, "X"), - (0x1EEA5, "M", "و"), - (0x1EEA6, "M", "ز"), - (0x1EEA7, "M", "ح"), - (0x1EEA8, "M", "ط"), - (0x1EEA9, "M", "ي"), - (0x1EEAA, "X"), - (0x1EEAB, "M", "ل"), - (0x1EEAC, "M", "م"), - (0x1EEAD, "M", "ن"), - (0x1EEAE, "M", "س"), - (0x1EEAF, "M", "ع"), - (0x1EEB0, "M", "ف"), - (0x1EEB1, "M", "ص"), - (0x1EEB2, "M", "ق"), - (0x1EEB3, "M", "ر"), - (0x1EEB4, "M", "ش"), - (0x1EEB5, "M", "ت"), - (0x1EEB6, "M", "ث"), - (0x1EEB7, "M", "خ"), - (0x1EEB8, "M", "ذ"), - ] - - -def _seg_74() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1EEB9, "M", "ض"), - (0x1EEBA, "M", "ظ"), - (0x1EEBB, "M", "غ"), - (0x1EEBC, "X"), - (0x1EEF0, "V"), - (0x1EEF2, "X"), - (0x1F000, "V"), - (0x1F02C, "X"), - (0x1F030, "V"), - (0x1F094, "X"), - (0x1F0A0, "V"), - 
(0x1F0AF, "X"), - (0x1F0B1, "V"), - (0x1F0C0, "X"), - (0x1F0C1, "V"), - (0x1F0D0, "X"), - (0x1F0D1, "V"), - (0x1F0F6, "X"), - (0x1F101, "3", "0,"), - (0x1F102, "3", "1,"), - (0x1F103, "3", "2,"), - (0x1F104, "3", "3,"), - (0x1F105, "3", "4,"), - (0x1F106, "3", "5,"), - (0x1F107, "3", "6,"), - (0x1F108, "3", "7,"), - (0x1F109, "3", "8,"), - (0x1F10A, "3", "9,"), - (0x1F10B, "V"), - (0x1F110, "3", "(a)"), - (0x1F111, "3", "(b)"), - (0x1F112, "3", "(c)"), - (0x1F113, "3", "(d)"), - (0x1F114, "3", "(e)"), - (0x1F115, "3", "(f)"), - (0x1F116, "3", "(g)"), - (0x1F117, "3", "(h)"), - (0x1F118, "3", "(i)"), - (0x1F119, "3", "(j)"), - (0x1F11A, "3", "(k)"), - (0x1F11B, "3", "(l)"), - (0x1F11C, "3", "(m)"), - (0x1F11D, "3", "(n)"), - (0x1F11E, "3", "(o)"), - (0x1F11F, "3", "(p)"), - (0x1F120, "3", "(q)"), - (0x1F121, "3", "(r)"), - (0x1F122, "3", "(s)"), - (0x1F123, "3", "(t)"), - (0x1F124, "3", "(u)"), - (0x1F125, "3", "(v)"), - (0x1F126, "3", "(w)"), - (0x1F127, "3", "(x)"), - (0x1F128, "3", "(y)"), - (0x1F129, "3", "(z)"), - (0x1F12A, "M", "〔s〕"), - (0x1F12B, "M", "c"), - (0x1F12C, "M", "r"), - (0x1F12D, "M", "cd"), - (0x1F12E, "M", "wz"), - (0x1F12F, "V"), - (0x1F130, "M", "a"), - (0x1F131, "M", "b"), - (0x1F132, "M", "c"), - (0x1F133, "M", "d"), - (0x1F134, "M", "e"), - (0x1F135, "M", "f"), - (0x1F136, "M", "g"), - (0x1F137, "M", "h"), - (0x1F138, "M", "i"), - (0x1F139, "M", "j"), - (0x1F13A, "M", "k"), - (0x1F13B, "M", "l"), - (0x1F13C, "M", "m"), - (0x1F13D, "M", "n"), - (0x1F13E, "M", "o"), - (0x1F13F, "M", "p"), - (0x1F140, "M", "q"), - (0x1F141, "M", "r"), - (0x1F142, "M", "s"), - (0x1F143, "M", "t"), - (0x1F144, "M", "u"), - (0x1F145, "M", "v"), - (0x1F146, "M", "w"), - (0x1F147, "M", "x"), - (0x1F148, "M", "y"), - (0x1F149, "M", "z"), - (0x1F14A, "M", "hv"), - (0x1F14B, "M", "mv"), - (0x1F14C, "M", "sd"), - (0x1F14D, "M", "ss"), - (0x1F14E, "M", "ppv"), - (0x1F14F, "M", "wc"), - (0x1F150, "V"), - (0x1F16A, "M", "mc"), - (0x1F16B, "M", "md"), - (0x1F16C, "M", "mr"), - (0x1F16D, "V"), - (0x1F190, "M", "dj"), - (0x1F191, "V"), - ] - - -def _seg_75() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1F1AE, "X"), - (0x1F1E6, "V"), - (0x1F200, "M", "ほか"), - (0x1F201, "M", "ココ"), - (0x1F202, "M", "サ"), - (0x1F203, "X"), - (0x1F210, "M", "手"), - (0x1F211, "M", "字"), - (0x1F212, "M", "双"), - (0x1F213, "M", "デ"), - (0x1F214, "M", "二"), - (0x1F215, "M", "多"), - (0x1F216, "M", "解"), - (0x1F217, "M", "天"), - (0x1F218, "M", "交"), - (0x1F219, "M", "映"), - (0x1F21A, "M", "無"), - (0x1F21B, "M", "料"), - (0x1F21C, "M", "前"), - (0x1F21D, "M", "後"), - (0x1F21E, "M", "再"), - (0x1F21F, "M", "新"), - (0x1F220, "M", "初"), - (0x1F221, "M", "終"), - (0x1F222, "M", "生"), - (0x1F223, "M", "販"), - (0x1F224, "M", "声"), - (0x1F225, "M", "吹"), - (0x1F226, "M", "演"), - (0x1F227, "M", "投"), - (0x1F228, "M", "捕"), - (0x1F229, "M", "一"), - (0x1F22A, "M", "三"), - (0x1F22B, "M", "遊"), - (0x1F22C, "M", "左"), - (0x1F22D, "M", "中"), - (0x1F22E, "M", "右"), - (0x1F22F, "M", "指"), - (0x1F230, "M", "走"), - (0x1F231, "M", "打"), - (0x1F232, "M", "禁"), - (0x1F233, "M", "空"), - (0x1F234, "M", "合"), - (0x1F235, "M", "満"), - (0x1F236, "M", "有"), - (0x1F237, "M", "月"), - (0x1F238, "M", "申"), - (0x1F239, "M", "割"), - (0x1F23A, "M", "営"), - (0x1F23B, "M", "配"), - (0x1F23C, "X"), - (0x1F240, "M", "〔本〕"), - (0x1F241, "M", "〔三〕"), - (0x1F242, "M", "〔二〕"), - (0x1F243, "M", "〔安〕"), - (0x1F244, "M", "〔点〕"), - (0x1F245, "M", "〔打〕"), - (0x1F246, "M", "〔盗〕"), - (0x1F247, "M", "〔勝〕"), - (0x1F248, "M", "〔敗〕"), - (0x1F249, "X"), - 
(0x1F250, "M", "得"), - (0x1F251, "M", "可"), - (0x1F252, "X"), - (0x1F260, "V"), - (0x1F266, "X"), - (0x1F300, "V"), - (0x1F6D8, "X"), - (0x1F6DC, "V"), - (0x1F6ED, "X"), - (0x1F6F0, "V"), - (0x1F6FD, "X"), - (0x1F700, "V"), - (0x1F777, "X"), - (0x1F77B, "V"), - (0x1F7DA, "X"), - (0x1F7E0, "V"), - (0x1F7EC, "X"), - (0x1F7F0, "V"), - (0x1F7F1, "X"), - (0x1F800, "V"), - (0x1F80C, "X"), - (0x1F810, "V"), - (0x1F848, "X"), - (0x1F850, "V"), - (0x1F85A, "X"), - (0x1F860, "V"), - (0x1F888, "X"), - (0x1F890, "V"), - (0x1F8AE, "X"), - (0x1F8B0, "V"), - (0x1F8B2, "X"), - (0x1F900, "V"), - (0x1FA54, "X"), - (0x1FA60, "V"), - (0x1FA6E, "X"), - (0x1FA70, "V"), - (0x1FA7D, "X"), - (0x1FA80, "V"), - (0x1FA89, "X"), - ] - - -def _seg_76() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x1FA90, "V"), - (0x1FABE, "X"), - (0x1FABF, "V"), - (0x1FAC6, "X"), - (0x1FACE, "V"), - (0x1FADC, "X"), - (0x1FAE0, "V"), - (0x1FAE9, "X"), - (0x1FAF0, "V"), - (0x1FAF9, "X"), - (0x1FB00, "V"), - (0x1FB93, "X"), - (0x1FB94, "V"), - (0x1FBCB, "X"), - (0x1FBF0, "M", "0"), - (0x1FBF1, "M", "1"), - (0x1FBF2, "M", "2"), - (0x1FBF3, "M", "3"), - (0x1FBF4, "M", "4"), - (0x1FBF5, "M", "5"), - (0x1FBF6, "M", "6"), - (0x1FBF7, "M", "7"), - (0x1FBF8, "M", "8"), - (0x1FBF9, "M", "9"), - (0x1FBFA, "X"), - (0x20000, "V"), - (0x2A6E0, "X"), - (0x2A700, "V"), - (0x2B73A, "X"), - (0x2B740, "V"), - (0x2B81E, "X"), - (0x2B820, "V"), - (0x2CEA2, "X"), - (0x2CEB0, "V"), - (0x2EBE1, "X"), - (0x2EBF0, "V"), - (0x2EE5E, "X"), - (0x2F800, "M", "丽"), - (0x2F801, "M", "丸"), - (0x2F802, "M", "乁"), - (0x2F803, "M", "𠄢"), - (0x2F804, "M", "你"), - (0x2F805, "M", "侮"), - (0x2F806, "M", "侻"), - (0x2F807, "M", "倂"), - (0x2F808, "M", "偺"), - (0x2F809, "M", "備"), - (0x2F80A, "M", "僧"), - (0x2F80B, "M", "像"), - (0x2F80C, "M", "㒞"), - (0x2F80D, "M", "𠘺"), - (0x2F80E, "M", "免"), - (0x2F80F, "M", "兔"), - (0x2F810, "M", "兤"), - (0x2F811, "M", "具"), - (0x2F812, "M", "𠔜"), - (0x2F813, "M", "㒹"), - (0x2F814, "M", "內"), - (0x2F815, "M", "再"), - (0x2F816, "M", "𠕋"), - (0x2F817, "M", "冗"), - (0x2F818, "M", "冤"), - (0x2F819, "M", "仌"), - (0x2F81A, "M", "冬"), - (0x2F81B, "M", "况"), - (0x2F81C, "M", "𩇟"), - (0x2F81D, "M", "凵"), - (0x2F81E, "M", "刃"), - (0x2F81F, "M", "㓟"), - (0x2F820, "M", "刻"), - (0x2F821, "M", "剆"), - (0x2F822, "M", "割"), - (0x2F823, "M", "剷"), - (0x2F824, "M", "㔕"), - (0x2F825, "M", "勇"), - (0x2F826, "M", "勉"), - (0x2F827, "M", "勤"), - (0x2F828, "M", "勺"), - (0x2F829, "M", "包"), - (0x2F82A, "M", "匆"), - (0x2F82B, "M", "北"), - (0x2F82C, "M", "卉"), - (0x2F82D, "M", "卑"), - (0x2F82E, "M", "博"), - (0x2F82F, "M", "即"), - (0x2F830, "M", "卽"), - (0x2F831, "M", "卿"), - (0x2F834, "M", "𠨬"), - (0x2F835, "M", "灰"), - (0x2F836, "M", "及"), - (0x2F837, "M", "叟"), - (0x2F838, "M", "𠭣"), - (0x2F839, "M", "叫"), - (0x2F83A, "M", "叱"), - (0x2F83B, "M", "吆"), - (0x2F83C, "M", "咞"), - (0x2F83D, "M", "吸"), - (0x2F83E, "M", "呈"), - (0x2F83F, "M", "周"), - (0x2F840, "M", "咢"), - ] - - -def _seg_77() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F841, "M", "哶"), - (0x2F842, "M", "唐"), - (0x2F843, "M", "啓"), - (0x2F844, "M", "啣"), - (0x2F845, "M", "善"), - (0x2F847, "M", "喙"), - (0x2F848, "M", "喫"), - (0x2F849, "M", "喳"), - (0x2F84A, "M", "嗂"), - (0x2F84B, "M", "圖"), - (0x2F84C, "M", "嘆"), - (0x2F84D, "M", "圗"), - (0x2F84E, "M", "噑"), - (0x2F84F, "M", "噴"), - (0x2F850, "M", "切"), - (0x2F851, "M", "壮"), - (0x2F852, "M", "城"), - (0x2F853, "M", "埴"), - (0x2F854, "M", "堍"), - (0x2F855, "M", "型"), - (0x2F856, "M", "堲"), - (0x2F857, "M", 
"報"), - (0x2F858, "M", "墬"), - (0x2F859, "M", "𡓤"), - (0x2F85A, "M", "売"), - (0x2F85B, "M", "壷"), - (0x2F85C, "M", "夆"), - (0x2F85D, "M", "多"), - (0x2F85E, "M", "夢"), - (0x2F85F, "M", "奢"), - (0x2F860, "M", "𡚨"), - (0x2F861, "M", "𡛪"), - (0x2F862, "M", "姬"), - (0x2F863, "M", "娛"), - (0x2F864, "M", "娧"), - (0x2F865, "M", "姘"), - (0x2F866, "M", "婦"), - (0x2F867, "M", "㛮"), - (0x2F868, "X"), - (0x2F869, "M", "嬈"), - (0x2F86A, "M", "嬾"), - (0x2F86C, "M", "𡧈"), - (0x2F86D, "M", "寃"), - (0x2F86E, "M", "寘"), - (0x2F86F, "M", "寧"), - (0x2F870, "M", "寳"), - (0x2F871, "M", "𡬘"), - (0x2F872, "M", "寿"), - (0x2F873, "M", "将"), - (0x2F874, "X"), - (0x2F875, "M", "尢"), - (0x2F876, "M", "㞁"), - (0x2F877, "M", "屠"), - (0x2F878, "M", "屮"), - (0x2F879, "M", "峀"), - (0x2F87A, "M", "岍"), - (0x2F87B, "M", "𡷤"), - (0x2F87C, "M", "嵃"), - (0x2F87D, "M", "𡷦"), - (0x2F87E, "M", "嵮"), - (0x2F87F, "M", "嵫"), - (0x2F880, "M", "嵼"), - (0x2F881, "M", "巡"), - (0x2F882, "M", "巢"), - (0x2F883, "M", "㠯"), - (0x2F884, "M", "巽"), - (0x2F885, "M", "帨"), - (0x2F886, "M", "帽"), - (0x2F887, "M", "幩"), - (0x2F888, "M", "㡢"), - (0x2F889, "M", "𢆃"), - (0x2F88A, "M", "㡼"), - (0x2F88B, "M", "庰"), - (0x2F88C, "M", "庳"), - (0x2F88D, "M", "庶"), - (0x2F88E, "M", "廊"), - (0x2F88F, "M", "𪎒"), - (0x2F890, "M", "廾"), - (0x2F891, "M", "𢌱"), - (0x2F893, "M", "舁"), - (0x2F894, "M", "弢"), - (0x2F896, "M", "㣇"), - (0x2F897, "M", "𣊸"), - (0x2F898, "M", "𦇚"), - (0x2F899, "M", "形"), - (0x2F89A, "M", "彫"), - (0x2F89B, "M", "㣣"), - (0x2F89C, "M", "徚"), - (0x2F89D, "M", "忍"), - (0x2F89E, "M", "志"), - (0x2F89F, "M", "忹"), - (0x2F8A0, "M", "悁"), - (0x2F8A1, "M", "㤺"), - (0x2F8A2, "M", "㤜"), - (0x2F8A3, "M", "悔"), - (0x2F8A4, "M", "𢛔"), - (0x2F8A5, "M", "惇"), - (0x2F8A6, "M", "慈"), - (0x2F8A7, "M", "慌"), - (0x2F8A8, "M", "慎"), - ] - - -def _seg_78() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F8A9, "M", "慌"), - (0x2F8AA, "M", "慺"), - (0x2F8AB, "M", "憎"), - (0x2F8AC, "M", "憲"), - (0x2F8AD, "M", "憤"), - (0x2F8AE, "M", "憯"), - (0x2F8AF, "M", "懞"), - (0x2F8B0, "M", "懲"), - (0x2F8B1, "M", "懶"), - (0x2F8B2, "M", "成"), - (0x2F8B3, "M", "戛"), - (0x2F8B4, "M", "扝"), - (0x2F8B5, "M", "抱"), - (0x2F8B6, "M", "拔"), - (0x2F8B7, "M", "捐"), - (0x2F8B8, "M", "𢬌"), - (0x2F8B9, "M", "挽"), - (0x2F8BA, "M", "拼"), - (0x2F8BB, "M", "捨"), - (0x2F8BC, "M", "掃"), - (0x2F8BD, "M", "揤"), - (0x2F8BE, "M", "𢯱"), - (0x2F8BF, "M", "搢"), - (0x2F8C0, "M", "揅"), - (0x2F8C1, "M", "掩"), - (0x2F8C2, "M", "㨮"), - (0x2F8C3, "M", "摩"), - (0x2F8C4, "M", "摾"), - (0x2F8C5, "M", "撝"), - (0x2F8C6, "M", "摷"), - (0x2F8C7, "M", "㩬"), - (0x2F8C8, "M", "敏"), - (0x2F8C9, "M", "敬"), - (0x2F8CA, "M", "𣀊"), - (0x2F8CB, "M", "旣"), - (0x2F8CC, "M", "書"), - (0x2F8CD, "M", "晉"), - (0x2F8CE, "M", "㬙"), - (0x2F8CF, "M", "暑"), - (0x2F8D0, "M", "㬈"), - (0x2F8D1, "M", "㫤"), - (0x2F8D2, "M", "冒"), - (0x2F8D3, "M", "冕"), - (0x2F8D4, "M", "最"), - (0x2F8D5, "M", "暜"), - (0x2F8D6, "M", "肭"), - (0x2F8D7, "M", "䏙"), - (0x2F8D8, "M", "朗"), - (0x2F8D9, "M", "望"), - (0x2F8DA, "M", "朡"), - (0x2F8DB, "M", "杞"), - (0x2F8DC, "M", "杓"), - (0x2F8DD, "M", "𣏃"), - (0x2F8DE, "M", "㭉"), - (0x2F8DF, "M", "柺"), - (0x2F8E0, "M", "枅"), - (0x2F8E1, "M", "桒"), - (0x2F8E2, "M", "梅"), - (0x2F8E3, "M", "𣑭"), - (0x2F8E4, "M", "梎"), - (0x2F8E5, "M", "栟"), - (0x2F8E6, "M", "椔"), - (0x2F8E7, "M", "㮝"), - (0x2F8E8, "M", "楂"), - (0x2F8E9, "M", "榣"), - (0x2F8EA, "M", "槪"), - (0x2F8EB, "M", "檨"), - (0x2F8EC, "M", "𣚣"), - (0x2F8ED, "M", "櫛"), - (0x2F8EE, "M", "㰘"), - (0x2F8EF, "M", "次"), - (0x2F8F0, "M", "𣢧"), - (0x2F8F1, "M", 
"歔"), - (0x2F8F2, "M", "㱎"), - (0x2F8F3, "M", "歲"), - (0x2F8F4, "M", "殟"), - (0x2F8F5, "M", "殺"), - (0x2F8F6, "M", "殻"), - (0x2F8F7, "M", "𣪍"), - (0x2F8F8, "M", "𡴋"), - (0x2F8F9, "M", "𣫺"), - (0x2F8FA, "M", "汎"), - (0x2F8FB, "M", "𣲼"), - (0x2F8FC, "M", "沿"), - (0x2F8FD, "M", "泍"), - (0x2F8FE, "M", "汧"), - (0x2F8FF, "M", "洖"), - (0x2F900, "M", "派"), - (0x2F901, "M", "海"), - (0x2F902, "M", "流"), - (0x2F903, "M", "浩"), - (0x2F904, "M", "浸"), - (0x2F905, "M", "涅"), - (0x2F906, "M", "𣴞"), - (0x2F907, "M", "洴"), - (0x2F908, "M", "港"), - (0x2F909, "M", "湮"), - (0x2F90A, "M", "㴳"), - (0x2F90B, "M", "滋"), - (0x2F90C, "M", "滇"), - ] - - -def _seg_79() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F90D, "M", "𣻑"), - (0x2F90E, "M", "淹"), - (0x2F90F, "M", "潮"), - (0x2F910, "M", "𣽞"), - (0x2F911, "M", "𣾎"), - (0x2F912, "M", "濆"), - (0x2F913, "M", "瀹"), - (0x2F914, "M", "瀞"), - (0x2F915, "M", "瀛"), - (0x2F916, "M", "㶖"), - (0x2F917, "M", "灊"), - (0x2F918, "M", "災"), - (0x2F919, "M", "灷"), - (0x2F91A, "M", "炭"), - (0x2F91B, "M", "𠔥"), - (0x2F91C, "M", "煅"), - (0x2F91D, "M", "𤉣"), - (0x2F91E, "M", "熜"), - (0x2F91F, "X"), - (0x2F920, "M", "爨"), - (0x2F921, "M", "爵"), - (0x2F922, "M", "牐"), - (0x2F923, "M", "𤘈"), - (0x2F924, "M", "犀"), - (0x2F925, "M", "犕"), - (0x2F926, "M", "𤜵"), - (0x2F927, "M", "𤠔"), - (0x2F928, "M", "獺"), - (0x2F929, "M", "王"), - (0x2F92A, "M", "㺬"), - (0x2F92B, "M", "玥"), - (0x2F92C, "M", "㺸"), - (0x2F92E, "M", "瑇"), - (0x2F92F, "M", "瑜"), - (0x2F930, "M", "瑱"), - (0x2F931, "M", "璅"), - (0x2F932, "M", "瓊"), - (0x2F933, "M", "㼛"), - (0x2F934, "M", "甤"), - (0x2F935, "M", "𤰶"), - (0x2F936, "M", "甾"), - (0x2F937, "M", "𤲒"), - (0x2F938, "M", "異"), - (0x2F939, "M", "𢆟"), - (0x2F93A, "M", "瘐"), - (0x2F93B, "M", "𤾡"), - (0x2F93C, "M", "𤾸"), - (0x2F93D, "M", "𥁄"), - (0x2F93E, "M", "㿼"), - (0x2F93F, "M", "䀈"), - (0x2F940, "M", "直"), - (0x2F941, "M", "𥃳"), - (0x2F942, "M", "𥃲"), - (0x2F943, "M", "𥄙"), - (0x2F944, "M", "𥄳"), - (0x2F945, "M", "眞"), - (0x2F946, "M", "真"), - (0x2F948, "M", "睊"), - (0x2F949, "M", "䀹"), - (0x2F94A, "M", "瞋"), - (0x2F94B, "M", "䁆"), - (0x2F94C, "M", "䂖"), - (0x2F94D, "M", "𥐝"), - (0x2F94E, "M", "硎"), - (0x2F94F, "M", "碌"), - (0x2F950, "M", "磌"), - (0x2F951, "M", "䃣"), - (0x2F952, "M", "𥘦"), - (0x2F953, "M", "祖"), - (0x2F954, "M", "𥚚"), - (0x2F955, "M", "𥛅"), - (0x2F956, "M", "福"), - (0x2F957, "M", "秫"), - (0x2F958, "M", "䄯"), - (0x2F959, "M", "穀"), - (0x2F95A, "M", "穊"), - (0x2F95B, "M", "穏"), - (0x2F95C, "M", "𥥼"), - (0x2F95D, "M", "𥪧"), - (0x2F95F, "X"), - (0x2F960, "M", "䈂"), - (0x2F961, "M", "𥮫"), - (0x2F962, "M", "篆"), - (0x2F963, "M", "築"), - (0x2F964, "M", "䈧"), - (0x2F965, "M", "𥲀"), - (0x2F966, "M", "糒"), - (0x2F967, "M", "䊠"), - (0x2F968, "M", "糨"), - (0x2F969, "M", "糣"), - (0x2F96A, "M", "紀"), - (0x2F96B, "M", "𥾆"), - (0x2F96C, "M", "絣"), - (0x2F96D, "M", "䌁"), - (0x2F96E, "M", "緇"), - (0x2F96F, "M", "縂"), - (0x2F970, "M", "繅"), - (0x2F971, "M", "䌴"), - (0x2F972, "M", "𦈨"), - (0x2F973, "M", "𦉇"), - ] - - -def _seg_80() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F974, "M", "䍙"), - (0x2F975, "M", "𦋙"), - (0x2F976, "M", "罺"), - (0x2F977, "M", "𦌾"), - (0x2F978, "M", "羕"), - (0x2F979, "M", "翺"), - (0x2F97A, "M", "者"), - (0x2F97B, "M", "𦓚"), - (0x2F97C, "M", "𦔣"), - (0x2F97D, "M", "聠"), - (0x2F97E, "M", "𦖨"), - (0x2F97F, "M", "聰"), - (0x2F980, "M", "𣍟"), - (0x2F981, "M", "䏕"), - (0x2F982, "M", "育"), - (0x2F983, "M", "脃"), - (0x2F984, "M", "䐋"), - (0x2F985, "M", "脾"), - (0x2F986, "M", "媵"), - (0x2F987, "M", "𦞧"), 
- (0x2F988, "M", "𦞵"), - (0x2F989, "M", "𣎓"), - (0x2F98A, "M", "𣎜"), - (0x2F98B, "M", "舁"), - (0x2F98C, "M", "舄"), - (0x2F98D, "M", "辞"), - (0x2F98E, "M", "䑫"), - (0x2F98F, "M", "芑"), - (0x2F990, "M", "芋"), - (0x2F991, "M", "芝"), - (0x2F992, "M", "劳"), - (0x2F993, "M", "花"), - (0x2F994, "M", "芳"), - (0x2F995, "M", "芽"), - (0x2F996, "M", "苦"), - (0x2F997, "M", "𦬼"), - (0x2F998, "M", "若"), - (0x2F999, "M", "茝"), - (0x2F99A, "M", "荣"), - (0x2F99B, "M", "莭"), - (0x2F99C, "M", "茣"), - (0x2F99D, "M", "莽"), - (0x2F99E, "M", "菧"), - (0x2F99F, "M", "著"), - (0x2F9A0, "M", "荓"), - (0x2F9A1, "M", "菊"), - (0x2F9A2, "M", "菌"), - (0x2F9A3, "M", "菜"), - (0x2F9A4, "M", "𦰶"), - (0x2F9A5, "M", "𦵫"), - (0x2F9A6, "M", "𦳕"), - (0x2F9A7, "M", "䔫"), - (0x2F9A8, "M", "蓱"), - (0x2F9A9, "M", "蓳"), - (0x2F9AA, "M", "蔖"), - (0x2F9AB, "M", "𧏊"), - (0x2F9AC, "M", "蕤"), - (0x2F9AD, "M", "𦼬"), - (0x2F9AE, "M", "䕝"), - (0x2F9AF, "M", "䕡"), - (0x2F9B0, "M", "𦾱"), - (0x2F9B1, "M", "𧃒"), - (0x2F9B2, "M", "䕫"), - (0x2F9B3, "M", "虐"), - (0x2F9B4, "M", "虜"), - (0x2F9B5, "M", "虧"), - (0x2F9B6, "M", "虩"), - (0x2F9B7, "M", "蚩"), - (0x2F9B8, "M", "蚈"), - (0x2F9B9, "M", "蜎"), - (0x2F9BA, "M", "蛢"), - (0x2F9BB, "M", "蝹"), - (0x2F9BC, "M", "蜨"), - (0x2F9BD, "M", "蝫"), - (0x2F9BE, "M", "螆"), - (0x2F9BF, "X"), - (0x2F9C0, "M", "蟡"), - (0x2F9C1, "M", "蠁"), - (0x2F9C2, "M", "䗹"), - (0x2F9C3, "M", "衠"), - (0x2F9C4, "M", "衣"), - (0x2F9C5, "M", "𧙧"), - (0x2F9C6, "M", "裗"), - (0x2F9C7, "M", "裞"), - (0x2F9C8, "M", "䘵"), - (0x2F9C9, "M", "裺"), - (0x2F9CA, "M", "㒻"), - (0x2F9CB, "M", "𧢮"), - (0x2F9CC, "M", "𧥦"), - (0x2F9CD, "M", "䚾"), - (0x2F9CE, "M", "䛇"), - (0x2F9CF, "M", "誠"), - (0x2F9D0, "M", "諭"), - (0x2F9D1, "M", "變"), - (0x2F9D2, "M", "豕"), - (0x2F9D3, "M", "𧲨"), - (0x2F9D4, "M", "貫"), - (0x2F9D5, "M", "賁"), - (0x2F9D6, "M", "贛"), - (0x2F9D7, "M", "起"), - ] - - -def _seg_81() -> List[Union[Tuple[int, str], Tuple[int, str, str]]]: - return [ - (0x2F9D8, "M", "𧼯"), - (0x2F9D9, "M", "𠠄"), - (0x2F9DA, "M", "跋"), - (0x2F9DB, "M", "趼"), - (0x2F9DC, "M", "跰"), - (0x2F9DD, "M", "𠣞"), - (0x2F9DE, "M", "軔"), - (0x2F9DF, "M", "輸"), - (0x2F9E0, "M", "𨗒"), - (0x2F9E1, "M", "𨗭"), - (0x2F9E2, "M", "邔"), - (0x2F9E3, "M", "郱"), - (0x2F9E4, "M", "鄑"), - (0x2F9E5, "M", "𨜮"), - (0x2F9E6, "M", "鄛"), - (0x2F9E7, "M", "鈸"), - (0x2F9E8, "M", "鋗"), - (0x2F9E9, "M", "鋘"), - (0x2F9EA, "M", "鉼"), - (0x2F9EB, "M", "鏹"), - (0x2F9EC, "M", "鐕"), - (0x2F9ED, "M", "𨯺"), - (0x2F9EE, "M", "開"), - (0x2F9EF, "M", "䦕"), - (0x2F9F0, "M", "閷"), - (0x2F9F1, "M", "𨵷"), - (0x2F9F2, "M", "䧦"), - (0x2F9F3, "M", "雃"), - (0x2F9F4, "M", "嶲"), - (0x2F9F5, "M", "霣"), - (0x2F9F6, "M", "𩅅"), - (0x2F9F7, "M", "𩈚"), - (0x2F9F8, "M", "䩮"), - (0x2F9F9, "M", "䩶"), - (0x2F9FA, "M", "韠"), - (0x2F9FB, "M", "𩐊"), - (0x2F9FC, "M", "䪲"), - (0x2F9FD, "M", "𩒖"), - (0x2F9FE, "M", "頋"), - (0x2FA00, "M", "頩"), - (0x2FA01, "M", "𩖶"), - (0x2FA02, "M", "飢"), - (0x2FA03, "M", "䬳"), - (0x2FA04, "M", "餩"), - (0x2FA05, "M", "馧"), - (0x2FA06, "M", "駂"), - (0x2FA07, "M", "駾"), - (0x2FA08, "M", "䯎"), - (0x2FA09, "M", "𩬰"), - (0x2FA0A, "M", "鬒"), - (0x2FA0B, "M", "鱀"), - (0x2FA0C, "M", "鳽"), - (0x2FA0D, "M", "䳎"), - (0x2FA0E, "M", "䳭"), - (0x2FA0F, "M", "鵧"), - (0x2FA10, "M", "𪃎"), - (0x2FA11, "M", "䳸"), - (0x2FA12, "M", "𪄅"), - (0x2FA13, "M", "𪈎"), - (0x2FA14, "M", "𪊑"), - (0x2FA15, "M", "麻"), - (0x2FA16, "M", "䵖"), - (0x2FA17, "M", "黹"), - (0x2FA18, "M", "黾"), - (0x2FA19, "M", "鼅"), - (0x2FA1A, "M", "鼏"), - (0x2FA1B, "M", "鼖"), - (0x2FA1C, "M", "鼻"), - (0x2FA1D, "M", "𪘀"), - (0x2FA1E, "X"), - (0x30000, "V"), - 
(0x3134B, "X"), - (0x31350, "V"), - (0x323B0, "X"), - (0xE0100, "I"), - (0xE01F0, "X"), - ] - - -uts46data = tuple( - _seg_0() - + _seg_1() - + _seg_2() - + _seg_3() - + _seg_4() - + _seg_5() - + _seg_6() - + _seg_7() - + _seg_8() - + _seg_9() - + _seg_10() - + _seg_11() - + _seg_12() - + _seg_13() - + _seg_14() - + _seg_15() - + _seg_16() - + _seg_17() - + _seg_18() - + _seg_19() - + _seg_20() - + _seg_21() - + _seg_22() - + _seg_23() - + _seg_24() - + _seg_25() - + _seg_26() - + _seg_27() - + _seg_28() - + _seg_29() - + _seg_30() - + _seg_31() - + _seg_32() - + _seg_33() - + _seg_34() - + _seg_35() - + _seg_36() - + _seg_37() - + _seg_38() - + _seg_39() - + _seg_40() - + _seg_41() - + _seg_42() - + _seg_43() - + _seg_44() - + _seg_45() - + _seg_46() - + _seg_47() - + _seg_48() - + _seg_49() - + _seg_50() - + _seg_51() - + _seg_52() - + _seg_53() - + _seg_54() - + _seg_55() - + _seg_56() - + _seg_57() - + _seg_58() - + _seg_59() - + _seg_60() - + _seg_61() - + _seg_62() - + _seg_63() - + _seg_64() - + _seg_65() - + _seg_66() - + _seg_67() - + _seg_68() - + _seg_69() - + _seg_70() - + _seg_71() - + _seg_72() - + _seg_73() - + _seg_74() - + _seg_75() - + _seg_76() - + _seg_77() - + _seg_78() - + _seg_79() - + _seg_80() - + _seg_81() -) # type: Tuple[Union[Tuple[int, str], Tuple[int, str, str]], ...] diff --git a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER b/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt b/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt deleted file mode 100644 index 7b190ca..0000000 --- a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2011 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/METADATA b/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/METADATA deleted file mode 100644 index ddf5464..0000000 --- a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/METADATA +++ /dev/null @@ -1,60 +0,0 @@ -Metadata-Version: 2.1 -Name: itsdangerous -Version: 2.2.0 -Summary: Safely pass data to untrusted environments and back. -Maintainer-email: Pallets -Requires-Python: >=3.8 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Typing :: Typed -Project-URL: Changes, https://itsdangerous.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://itsdangerous.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/itsdangerous/ - -# ItsDangerous - -... so better sign this - -Various helpers to pass data to untrusted environments and to get it -back safe and sound. Data is cryptographically signed to ensure that a -token has not been tampered with. - -It's possible to customize how data is serialized. Data is compressed as -needed. A timestamp can be added and verified automatically while -loading a token. - - -## A Simple Example - -Here's how you could generate a token for transmitting a user's id and -name between web requests. - -```python -from itsdangerous import URLSafeSerializer -auth_s = URLSafeSerializer("secret key", "auth") -token = auth_s.dumps({"id": 5, "name": "itsdangerous"}) - -print(token) -# eyJpZCI6NSwibmFtZSI6Iml0c2Rhbmdlcm91cyJ9.6YP6T0BaO67XP--9UzTrmurXSmg - -data = auth_s.loads(token) -print(data["name"]) -# itsdangerous -``` - - -## Donate - -The Pallets organization develops and supports ItsDangerous and other -popular packages. In order to grow the community of contributors and -users, and allow the maintainers to devote more time to the projects, -[please donate today][]. 
- -[please donate today]: https://palletsprojects.com/donate - diff --git a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/RECORD b/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/RECORD deleted file mode 100644 index 1394876..0000000 --- a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/RECORD +++ /dev/null @@ -1,22 +0,0 @@ -itsdangerous-2.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -itsdangerous-2.2.0.dist-info/LICENSE.txt,sha256=Y68JiRtr6K0aQlLtQ68PTvun_JSOIoNnvtfzxa4LCdc,1475 -itsdangerous-2.2.0.dist-info/METADATA,sha256=0rk0-1ZwihuU5DnwJVwPWoEI4yWOyCexih3JyZHblhE,1924 -itsdangerous-2.2.0.dist-info/RECORD,, -itsdangerous-2.2.0.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -itsdangerous/__init__.py,sha256=4SK75sCe29xbRgQE1ZQtMHnKUuZYAf3bSpZOrff1IAY,1427 -itsdangerous/__pycache__/__init__.cpython-312.pyc,, -itsdangerous/__pycache__/_json.cpython-312.pyc,, -itsdangerous/__pycache__/encoding.cpython-312.pyc,, -itsdangerous/__pycache__/exc.cpython-312.pyc,, -itsdangerous/__pycache__/serializer.cpython-312.pyc,, -itsdangerous/__pycache__/signer.cpython-312.pyc,, -itsdangerous/__pycache__/timed.cpython-312.pyc,, -itsdangerous/__pycache__/url_safe.cpython-312.pyc,, -itsdangerous/_json.py,sha256=wPQGmge2yZ9328EHKF6gadGeyGYCJQKxtU-iLKE6UnA,473 -itsdangerous/encoding.py,sha256=wwTz5q_3zLcaAdunk6_vSoStwGqYWe307Zl_U87aRFM,1409 -itsdangerous/exc.py,sha256=Rr3exo0MRFEcPZltwecyK16VV1bE2K9_F1-d-ljcUn4,3201 -itsdangerous/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -itsdangerous/serializer.py,sha256=PmdwADLqkSyQLZ0jOKAgDsAW4k_H0TlA71Ei3z0C5aI,15601 -itsdangerous/signer.py,sha256=YO0CV7NBvHA6j549REHJFUjUojw2pHqwcUpQnU7yNYQ,9647 -itsdangerous/timed.py,sha256=6RvDMqNumGMxf0-HlpaZdN9PUQQmRvrQGplKhxuivUs,8083 -itsdangerous/url_safe.py,sha256=az4e5fXi_vs-YbWj8YZwn4wiVKfeD--GEKRT5Ueu4P4,2505 diff --git a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/WHEEL b/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/myenv/Lib/site-packages/itsdangerous-2.2.0.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/itsdangerous/__init__.py b/myenv/Lib/site-packages/itsdangerous/__init__.py deleted file mode 100644 index ea55256..0000000 --- a/myenv/Lib/site-packages/itsdangerous/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -from __future__ import annotations - -import typing as t - -from .encoding import base64_decode as base64_decode -from .encoding import base64_encode as base64_encode -from .encoding import want_bytes as want_bytes -from .exc import BadData as BadData -from .exc import BadHeader as BadHeader -from .exc import BadPayload as BadPayload -from .exc import BadSignature as BadSignature -from .exc import BadTimeSignature as BadTimeSignature -from .exc import SignatureExpired as SignatureExpired -from .serializer import Serializer as Serializer -from .signer import HMACAlgorithm as HMACAlgorithm -from .signer import NoneAlgorithm as NoneAlgorithm -from .signer import Signer as Signer -from .timed import TimedSerializer as TimedSerializer -from .timed import TimestampSigner as TimestampSigner -from .url_safe import URLSafeSerializer as URLSafeSerializer -from .url_safe import URLSafeTimedSerializer as URLSafeTimedSerializer - - -def __getattr__(name: str) -> t.Any: - if name == "__version__": - import importlib.metadata - import warnings - - 
warnings.warn( - "The '__version__' attribute is deprecated and will be removed in" - " ItsDangerous 2.3. Use feature detection or" - " 'importlib.metadata.version(\"itsdangerous\")' instead.", - DeprecationWarning, - stacklevel=2, - ) - return importlib.metadata.version("itsdangerous") - - raise AttributeError(name) diff --git a/myenv/Lib/site-packages/itsdangerous/_json.py b/myenv/Lib/site-packages/itsdangerous/_json.py deleted file mode 100644 index fc23fea..0000000 --- a/myenv/Lib/site-packages/itsdangerous/_json.py +++ /dev/null @@ -1,18 +0,0 @@ -from __future__ import annotations - -import json as _json -import typing as t - - -class _CompactJSON: - """Wrapper around json module that strips whitespace.""" - - @staticmethod - def loads(payload: str | bytes) -> t.Any: - return _json.loads(payload) - - @staticmethod - def dumps(obj: t.Any, **kwargs: t.Any) -> str: - kwargs.setdefault("ensure_ascii", False) - kwargs.setdefault("separators", (",", ":")) - return _json.dumps(obj, **kwargs) diff --git a/myenv/Lib/site-packages/itsdangerous/encoding.py b/myenv/Lib/site-packages/itsdangerous/encoding.py deleted file mode 100644 index f5ca80f..0000000 --- a/myenv/Lib/site-packages/itsdangerous/encoding.py +++ /dev/null @@ -1,54 +0,0 @@ -from __future__ import annotations - -import base64 -import string -import struct -import typing as t - -from .exc import BadData - - -def want_bytes( - s: str | bytes, encoding: str = "utf-8", errors: str = "strict" -) -> bytes: - if isinstance(s, str): - s = s.encode(encoding, errors) - - return s - - -def base64_encode(string: str | bytes) -> bytes: - """Base64 encode a string of bytes or text. The resulting bytes are - safe to use in URLs. - """ - string = want_bytes(string) - return base64.urlsafe_b64encode(string).rstrip(b"=") - - -def base64_decode(string: str | bytes) -> bytes: - """Base64 decode a URL-safe string of bytes or text. The result is - bytes. - """ - string = want_bytes(string, encoding="ascii", errors="ignore") - string += b"=" * (-len(string) % 4) - - try: - return base64.urlsafe_b64decode(string) - except (TypeError, ValueError) as e: - raise BadData("Invalid base64-encoded data") from e - - -# The alphabet used by base64.urlsafe_* -_base64_alphabet = f"{string.ascii_letters}{string.digits}-_=".encode("ascii") - -_int64_struct = struct.Struct(">Q") -_int_to_bytes = _int64_struct.pack -_bytes_to_int = t.cast("t.Callable[[bytes], tuple[int]]", _int64_struct.unpack) - - -def int_to_bytes(num: int) -> bytes: - return _int_to_bytes(num).lstrip(b"\x00") - - -def bytes_to_int(bytestr: bytes) -> int: - return _bytes_to_int(bytestr.rjust(8, b"\x00"))[0] diff --git a/myenv/Lib/site-packages/itsdangerous/exc.py b/myenv/Lib/site-packages/itsdangerous/exc.py deleted file mode 100644 index a75adcd..0000000 --- a/myenv/Lib/site-packages/itsdangerous/exc.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import annotations - -import typing as t -from datetime import datetime - - -class BadData(Exception): - """Raised if bad data of any sort was encountered. This is the base - for all exceptions that ItsDangerous defines. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str): - super().__init__(message) - self.message = message - - def __str__(self) -> str: - return self.message - - -class BadSignature(BadData): - """Raised if a signature does not match.""" - - def __init__(self, message: str, payload: t.Any | None = None): - super().__init__(message) - - #: The payload that failed the signature test. 
In some - #: situations you might still want to inspect this, even if - #: you know it was tampered with. - #: - #: .. versionadded:: 0.14 - self.payload: t.Any | None = payload - - -class BadTimeSignature(BadSignature): - """Raised if a time-based signature is invalid. This is a subclass - of :class:`BadSignature`. - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - date_signed: datetime | None = None, - ): - super().__init__(message, payload) - - #: If the signature expired this exposes the date of when the - #: signature was created. This can be helpful in order to - #: tell the user how long a link has been gone stale. - #: - #: .. versionchanged:: 2.0 - #: The datetime value is timezone-aware rather than naive. - #: - #: .. versionadded:: 0.14 - self.date_signed = date_signed - - -class SignatureExpired(BadTimeSignature): - """Raised if a signature timestamp is older than ``max_age``. This - is a subclass of :exc:`BadTimeSignature`. - """ - - -class BadHeader(BadSignature): - """Raised if a signed header is invalid in some form. This only - happens for serializers that have a header that goes with the - signature. - - .. versionadded:: 0.24 - """ - - def __init__( - self, - message: str, - payload: t.Any | None = None, - header: t.Any | None = None, - original_error: Exception | None = None, - ): - super().__init__(message, payload) - - #: If the header is actually available but just malformed it - #: might be stored here. - self.header: t.Any | None = header - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error - - -class BadPayload(BadData): - """Raised if a payload is invalid. This could happen if the payload - is loaded despite an invalid signature, or if there is a mismatch - between the serializer and deserializer. The original exception - that occurred during loading is stored on as :attr:`original_error`. - - .. versionadded:: 0.15 - """ - - def __init__(self, message: str, original_error: Exception | None = None): - super().__init__(message) - - #: If available, the error that indicates why the payload was - #: not valid. This might be ``None``. - self.original_error: Exception | None = original_error diff --git a/myenv/Lib/site-packages/itsdangerous/py.typed b/myenv/Lib/site-packages/itsdangerous/py.typed deleted file mode 100644 index e69de29..0000000 diff --git a/myenv/Lib/site-packages/itsdangerous/serializer.py b/myenv/Lib/site-packages/itsdangerous/serializer.py deleted file mode 100644 index 5ddf387..0000000 --- a/myenv/Lib/site-packages/itsdangerous/serializer.py +++ /dev/null @@ -1,406 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import json -import typing as t - -from .encoding import want_bytes -from .exc import BadPayload -from .exc import BadSignature -from .signer import _make_keys_list -from .signer import Signer - -if t.TYPE_CHECKING: - import typing_extensions as te - - # This should be either be str or bytes. To avoid having to specify the - # bound type, it falls back to a union if structural matching fails. - _TSerialized = te.TypeVar( - "_TSerialized", bound=t.Union[str, bytes], default=t.Union[str, bytes] - ) -else: - # Still available at runtime on Python < 3.13, but without the default. - _TSerialized = t.TypeVar("_TSerialized", bound=t.Union[str, bytes]) - - -class _PDataSerializer(t.Protocol[_TSerialized]): - def loads(self, payload: _TSerialized, /) -> t.Any: ... 
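The exception classes above deliberately carry context for callers: `BadSignature.payload` exposes the still-untrusted payload, and `BadTimeSignature.date_signed` the time the value was signed. A hedged sketch of handling both cases; the key and data are placeholders:

```python
from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer

s = URLSafeTimedSerializer("secret key")  # placeholder key
token = s.dumps({"id": 5})

try:
    data = s.loads(token, max_age=3600)
except SignatureExpired as e:  # subclass of BadSignature, so catch it first
    print("expired; originally signed at", e.date_signed)
except BadSignature as e:
    print("tampered or wrong key; untrusted payload:", e.payload)
```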
- # A signature with additional arguments is not handled correctly by type - # checkers right now, so an overload is used below for serializers that - # don't match this strict protocol. - def dumps(self, obj: t.Any, /) -> _TSerialized: ... - - -# Use TypeIs once it's available in typing_extensions or 3.13. -def is_text_serializer( - serializer: _PDataSerializer[t.Any], -) -> te.TypeGuard[_PDataSerializer[str]]: - """Checks whether a serializer generates text or binary.""" - return isinstance(serializer.dumps({}), str) - - -class Serializer(t.Generic[_TSerialized]): - """A serializer wraps a :class:`~itsdangerous.signer.Signer` to - enable serializing and securely signing data other than bytes. It - can unsign to verify that the data hasn't been changed. - - The serializer provides :meth:`dumps` and :meth:`loads`, similar to - :mod:`json`, and by default uses :mod:`json` internally to serialize - the data to bytes. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param serializer: An object that provides ``dumps`` and ``loads`` - methods for serializing data to a string. Defaults to - :attr:`default_serializer`, which defaults to :mod:`json`. - :param serializer_kwargs: Keyword arguments to pass when calling - ``serializer.dumps``. - :param signer: A ``Signer`` class to instantiate when signing data. - Defaults to :attr:`default_signer`, which defaults to - :class:`~itsdangerous.signer.Signer`. - :param signer_kwargs: Keyword arguments to pass when instantiating - the ``Signer`` class. - :param fallback_signers: List of signer parameters to try when - unsigning with the default signer fails. Each item can be a dict - of ``signer_kwargs``, a ``Signer`` class, or a tuple of - ``(signer, signer_kwargs)``. Defaults to - :attr:`default_fallback_signers`. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 2.0 - Removed the default SHA-512 fallback signer from - ``default_fallback_signers``. - - .. versionchanged:: 1.1 - Added support for ``fallback_signers`` and configured a default - SHA-512 fallback. This fallback is for users who used the yanked - 1.0.0 release which defaulted to SHA-512. - - .. versionchanged:: 0.14 - The ``signer`` and ``signer_kwargs`` parameters were added to - the constructor. - """ - - #: The default serialization module to use to serialize data to a - #: string internally. The default is :mod:`json`, but can be changed - #: to any object that provides ``dumps`` and ``loads`` methods. - default_serializer: _PDataSerializer[t.Any] = json - - #: The default ``Signer`` class to instantiate when signing data. - #: The default is :class:`itsdangerous.signer.Signer`. - default_signer: type[Signer] = Signer - - #: The default fallback signers to try when unsigning fails. - default_fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = [] - - # Serializer[str] if no data serializer is provided, or if it returns str. 
- @t.overload - def __init__( - self: Serializer[str], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: None | _PDataSerializer[str] = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer positional argument. - @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Serializer[bytes] with a bytes data serializer keyword argument. - @t.overload - def __init__( - self: Serializer[bytes], - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: _PDataSerializer[bytes], - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a positional argument. If the strict signature of - # _PDataSerializer doesn't match, fall back to a union, requiring the user - # to specify the type. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - # Fall back with a keyword argument. - @t.overload - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - *, - serializer: t.Any, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): ... - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous", - serializer: t.Any | None = None, - serializer_kwargs: dict[str, t.Any] | None = None, - signer: type[Signer] | None = None, - signer_kwargs: dict[str, t.Any] | None = None, - fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] - | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. 
- self.secret_keys: list[bytes] = _make_keys_list(secret_key) - - if salt is not None: - salt = want_bytes(salt) - # if salt is None then the signer's default is used - - self.salt = salt - - if serializer is None: - serializer = self.default_serializer - - self.serializer: _PDataSerializer[_TSerialized] = serializer - self.is_text_serializer: bool = is_text_serializer(serializer) - - if signer is None: - signer = self.default_signer - - self.signer: type[Signer] = signer - self.signer_kwargs: dict[str, t.Any] = signer_kwargs or {} - - if fallback_signers is None: - fallback_signers = list(self.default_fallback_signers) - - self.fallback_signers: list[ - dict[str, t.Any] | tuple[type[Signer], dict[str, t.Any]] | type[Signer] - ] = fallback_signers - self.serializer_kwargs: dict[str, t.Any] = serializer_kwargs or {} - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def load_payload( - self, payload: bytes, serializer: _PDataSerializer[t.Any] | None = None - ) -> t.Any: - """Loads the encoded object. This function raises - :class:`.BadPayload` if the payload is not valid. The - ``serializer`` parameter can be used to override the serializer - stored on the class. The encoded ``payload`` should always be - bytes. - """ - if serializer is None: - use_serializer = self.serializer - is_text = self.is_text_serializer - else: - use_serializer = serializer - is_text = is_text_serializer(serializer) - - try: - if is_text: - return use_serializer.loads(payload.decode("utf-8")) # type: ignore[arg-type] - - return use_serializer.loads(payload) # type: ignore[arg-type] - except Exception as e: - raise BadPayload( - "Could not load the payload because an exception" - " occurred on unserializing the data.", - original_error=e, - ) from e - - def dump_payload(self, obj: t.Any) -> bytes: - """Dumps the encoded object. The return value is always bytes. - If the internal serializer returns text, the value will be - encoded as UTF-8. - """ - return want_bytes(self.serializer.dumps(obj, **self.serializer_kwargs)) - - def make_signer(self, salt: str | bytes | None = None) -> Signer: - """Creates a new instance of the signer to be used. The default - implementation uses the :class:`.Signer` base class. - """ - if salt is None: - salt = self.salt - - return self.signer(self.secret_keys, salt=salt, **self.signer_kwargs) - - def iter_unsigners(self, salt: str | bytes | None = None) -> cabc.Iterator[Signer]: - """Iterates over all signers to be tried for unsigning. Starts - with the configured signer, then constructs each signer - specified in ``fallback_signers``. - """ - if salt is None: - salt = self.salt - - yield self.make_signer(salt) - - for fallback in self.fallback_signers: - if isinstance(fallback, dict): - kwargs = fallback - fallback = self.signer - elif isinstance(fallback, tuple): - fallback, kwargs = fallback - else: - kwargs = self.signer_kwargs - - for secret_key in self.secret_keys: - yield fallback(secret_key, salt=salt, **kwargs) - - def dumps(self, obj: t.Any, salt: str | bytes | None = None) -> _TSerialized: - """Returns a signed string serialized with the internal - serializer. The return value can be either a byte or unicode - string depending on the format of the internal serializer. 
- """ - payload = want_bytes(self.dump_payload(obj)) - rv = self.make_signer(salt).sign(payload) - - if self.is_text_serializer: - return rv.decode("utf-8") # type: ignore[return-value] - - return rv # type: ignore[return-value] - - def dump(self, obj: t.Any, f: t.IO[t.Any], salt: str | bytes | None = None) -> None: - """Like :meth:`dumps` but dumps into a file. The file handle has - to be compatible with what the internal serializer expects. - """ - f.write(self.dumps(obj, salt)) - - def loads( - self, s: str | bytes, salt: str | bytes | None = None, **kwargs: t.Any - ) -> t.Any: - """Reverse of :meth:`dumps`. Raises :exc:`.BadSignature` if the - signature validation fails. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - return self.load_payload(signer.unsign(s)) - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def load(self, f: t.IO[t.Any], salt: str | bytes | None = None) -> t.Any: - """Like :meth:`loads` but loads from a file.""" - return self.loads(f.read(), salt) - - def loads_unsafe( - self, s: str | bytes, salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads` but without verifying the signature. This - is potentially very dangerous to use depending on how your - serializer works. The return value is ``(signature_valid, - payload)`` instead of just the payload. The first item will be a - boolean that indicates if the signature is valid. This function - never fails. - - Use it for debugging only and if you know that your serializer - module is not exploitable (for example, do not use it with a - pickle serializer). - - .. versionadded:: 0.15 - """ - return self._loads_unsafe_impl(s, salt) - - def _loads_unsafe_impl( - self, - s: str | bytes, - salt: str | bytes | None, - load_kwargs: dict[str, t.Any] | None = None, - load_payload_kwargs: dict[str, t.Any] | None = None, - ) -> tuple[bool, t.Any]: - """Low level helper function to implement :meth:`loads_unsafe` - in serializer subclasses. - """ - if load_kwargs is None: - load_kwargs = {} - - try: - return True, self.loads(s, salt=salt, **load_kwargs) - except BadSignature as e: - if e.payload is None: - return False, None - - if load_payload_kwargs is None: - load_payload_kwargs = {} - - try: - return ( - False, - self.load_payload(e.payload, **load_payload_kwargs), - ) - except BadPayload: - return False, None - - def load_unsafe( - self, f: t.IO[t.Any], salt: str | bytes | None = None - ) -> tuple[bool, t.Any]: - """Like :meth:`loads_unsafe` but loads from a file. - - .. versionadded:: 0.15 - """ - return self.loads_unsafe(f.read(), salt=salt) diff --git a/myenv/Lib/site-packages/itsdangerous/signer.py b/myenv/Lib/site-packages/itsdangerous/signer.py deleted file mode 100644 index e324dc0..0000000 --- a/myenv/Lib/site-packages/itsdangerous/signer.py +++ /dev/null @@ -1,266 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import hashlib -import hmac -import typing as t - -from .encoding import _base64_alphabet -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import want_bytes -from .exc import BadSignature - - -class SigningAlgorithm: - """Subclasses must implement :meth:`get_signature` to provide - signature generation functionality. 
- """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - """Returns the signature for the given key and value.""" - raise NotImplementedError() - - def verify_signature(self, key: bytes, value: bytes, sig: bytes) -> bool: - """Verifies the given signature matches the expected - signature. - """ - return hmac.compare_digest(sig, self.get_signature(key, value)) - - -class NoneAlgorithm(SigningAlgorithm): - """Provides an algorithm that does not perform any signing and - returns an empty signature. - """ - - def get_signature(self, key: bytes, value: bytes) -> bytes: - return b"" - - -def _lazy_sha1(string: bytes = b"") -> t.Any: - """Don't access ``hashlib.sha1`` until runtime. FIPS builds may not include - SHA-1, in which case the import and use as a default would fail before the - developer can configure something else. - """ - return hashlib.sha1(string) - - -class HMACAlgorithm(SigningAlgorithm): - """Provides signature generation using HMACs.""" - - #: The digest method to use with the MAC algorithm. This defaults to - #: SHA1, but can be changed to any other function in the hashlib - #: module. - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - def __init__(self, digest_method: t.Any = None): - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - def get_signature(self, key: bytes, value: bytes) -> bytes: - mac = hmac.new(key, msg=value, digestmod=self.digest_method) - return mac.digest() - - -def _make_keys_list( - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], -) -> list[bytes]: - if isinstance(secret_key, (str, bytes)): - return [want_bytes(secret_key)] - - return [want_bytes(s) for s in secret_key] # pyright: ignore - - -class Signer: - """A signer securely signs bytes, then unsigns them to verify that - the value hasn't been changed. - - The secret key should be a random string of ``bytes`` and should not - be saved to code or version control. Different salts should be used - to distinguish signing in different contexts. See :doc:`/concepts` - for information about the security of the secret key and salt. - - :param secret_key: The secret key to sign and verify with. Can be a - list of keys, oldest to newest, to support key rotation. - :param salt: Extra key to combine with ``secret_key`` to distinguish - signatures in different contexts. - :param sep: Separator between the signature and value. - :param key_derivation: How to derive the signing key from the secret - key and salt. Possible values are ``concat``, ``django-concat``, - or ``hmac``. Defaults to :attr:`default_key_derivation`, which - defaults to ``django-concat``. - :param digest_method: Hash function to use when generating the HMAC - signature. Defaults to :attr:`default_digest_method`, which - defaults to :func:`hashlib.sha1`. Note that the security of the - hash alone doesn't apply when used intermediately in HMAC. - :param algorithm: A :class:`SigningAlgorithm` instance to use - instead of building a default :class:`HMACAlgorithm` with the - ``digest_method``. - - .. versionchanged:: 2.0 - Added support for key rotation by passing a list to - ``secret_key``. - - .. versionchanged:: 0.18 - ``algorithm`` was added as an argument to the class constructor. - - .. versionchanged:: 0.14 - ``key_derivation`` and ``digest_method`` were added as arguments - to the class constructor. - """ - - #: The default digest method to use for the signer. 
The default is - #: :func:`hashlib.sha1`, but can be changed to any :mod:`hashlib` or - #: compatible object. Note that the security of the hash alone - #: doesn't apply when used intermediately in HMAC. - #: - #: .. versionadded:: 0.14 - default_digest_method: t.Any = staticmethod(_lazy_sha1) - - #: The default scheme to use to derive the signing key from the - #: secret key and salt. The default is ``django-concat``. Possible - #: values are ``concat``, ``django-concat``, and ``hmac``. - #: - #: .. versionadded:: 0.14 - default_key_derivation: str = "django-concat" - - def __init__( - self, - secret_key: str | bytes | cabc.Iterable[str] | cabc.Iterable[bytes], - salt: str | bytes | None = b"itsdangerous.Signer", - sep: str | bytes = b".", - key_derivation: str | None = None, - digest_method: t.Any | None = None, - algorithm: SigningAlgorithm | None = None, - ): - #: The list of secret keys to try for verifying signatures, from - #: oldest to newest. The newest (last) key is used for signing. - #: - #: This allows a key rotation system to keep a list of allowed - #: keys and remove expired ones. - self.secret_keys: list[bytes] = _make_keys_list(secret_key) - self.sep: bytes = want_bytes(sep) - - if self.sep in _base64_alphabet: - raise ValueError( - "The given separator cannot be used because it may be" - " contained in the signature itself. ASCII letters," - " digits, and '-_=' must not be used." - ) - - if salt is not None: - salt = want_bytes(salt) - else: - salt = b"itsdangerous.Signer" - - self.salt = salt - - if key_derivation is None: - key_derivation = self.default_key_derivation - - self.key_derivation: str = key_derivation - - if digest_method is None: - digest_method = self.default_digest_method - - self.digest_method: t.Any = digest_method - - if algorithm is None: - algorithm = HMACAlgorithm(self.digest_method) - - self.algorithm: SigningAlgorithm = algorithm - - @property - def secret_key(self) -> bytes: - """The newest (last) entry in the :attr:`secret_keys` list. This - is for compatibility from before key rotation support was added. - """ - return self.secret_keys[-1] - - def derive_key(self, secret_key: str | bytes | None = None) -> bytes: - """This method is called to derive the key. The default key - derivation choices can be overridden here. Key derivation is not - intended to be used as a security method to make a complex key - out of a short password. Instead you should use large random - secret keys. - - :param secret_key: A specific secret key to derive from. - Defaults to the last item in :attr:`secret_keys`. - - .. versionchanged:: 2.0 - Added the ``secret_key`` parameter. 
- """ - if secret_key is None: - secret_key = self.secret_keys[-1] - else: - secret_key = want_bytes(secret_key) - - if self.key_derivation == "concat": - return t.cast(bytes, self.digest_method(self.salt + secret_key).digest()) - elif self.key_derivation == "django-concat": - return t.cast( - bytes, self.digest_method(self.salt + b"signer" + secret_key).digest() - ) - elif self.key_derivation == "hmac": - mac = hmac.new(secret_key, digestmod=self.digest_method) - mac.update(self.salt) - return mac.digest() - elif self.key_derivation == "none": - return secret_key - else: - raise TypeError("Unknown key derivation method") - - def get_signature(self, value: str | bytes) -> bytes: - """Returns the signature for the given value.""" - value = want_bytes(value) - key = self.derive_key() - sig = self.algorithm.get_signature(key, value) - return base64_encode(sig) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string.""" - value = want_bytes(value) - return value + self.sep + self.get_signature(value) - - def verify_signature(self, value: str | bytes, sig: str | bytes) -> bool: - """Verifies the signature for the given value.""" - try: - sig = base64_decode(sig) - except Exception: - return False - - value = want_bytes(value) - - for secret_key in reversed(self.secret_keys): - key = self.derive_key(secret_key) - - if self.algorithm.verify_signature(key, value, sig): - return True - - return False - - def unsign(self, signed_value: str | bytes) -> bytes: - """Unsigns the given string.""" - signed_value = want_bytes(signed_value) - - if self.sep not in signed_value: - raise BadSignature(f"No {self.sep!r} found in value") - - value, sig = signed_value.rsplit(self.sep, 1) - - if self.verify_signature(value, sig): - return value - - raise BadSignature(f"Signature {sig!r} does not match", payload=value) - - def validate(self, signed_value: str | bytes) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid. - """ - try: - self.unsign(signed_value) - return True - except BadSignature: - return False diff --git a/myenv/Lib/site-packages/itsdangerous/timed.py b/myenv/Lib/site-packages/itsdangerous/timed.py deleted file mode 100644 index 7384375..0000000 --- a/myenv/Lib/site-packages/itsdangerous/timed.py +++ /dev/null @@ -1,228 +0,0 @@ -from __future__ import annotations - -import collections.abc as cabc -import time -import typing as t -from datetime import datetime -from datetime import timezone - -from .encoding import base64_decode -from .encoding import base64_encode -from .encoding import bytes_to_int -from .encoding import int_to_bytes -from .encoding import want_bytes -from .exc import BadSignature -from .exc import BadTimeSignature -from .exc import SignatureExpired -from .serializer import _TSerialized -from .serializer import Serializer -from .signer import Signer - - -class TimestampSigner(Signer): - """Works like the regular :class:`.Signer` but also records the time - of the signing and can be used to expire signatures. The - :meth:`unsign` method can raise :exc:`.SignatureExpired` if the - unsigning failed because the signature is expired. - """ - - def get_timestamp(self) -> int: - """Returns the current timestamp. The function must return an - integer. - """ - return int(time.time()) - - def timestamp_to_datetime(self, ts: int) -> datetime: - """Convert the timestamp from :meth:`get_timestamp` into an - aware :class`datetime.datetime` in UTC. - - .. 
versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - return datetime.fromtimestamp(ts, tz=timezone.utc) - - def sign(self, value: str | bytes) -> bytes: - """Signs the given string and also attaches time information.""" - value = want_bytes(value) - timestamp = base64_encode(int_to_bytes(self.get_timestamp())) - sep = want_bytes(self.sep) - value = value + sep + timestamp - return value + sep + self.get_signature(value) - - # Ignore overlapping signatures check, return_timestamp is the only - # parameter that affects the return type. - - @t.overload - def unsign( # type: ignore[overload-overlap] - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[False] = False, - ) -> bytes: ... - - @t.overload - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: t.Literal[True] = True, - ) -> tuple[bytes, datetime]: ... - - def unsign( - self, - signed_value: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - ) -> tuple[bytes, datetime] | bytes: - """Works like the regular :meth:`.Signer.unsign` but can also - validate the time. See the base docstring of the class for - the general behavior. If ``return_timestamp`` is ``True`` the - timestamp of the signature will be returned as an aware - :class:`datetime.datetime` object in UTC. - - .. versionchanged:: 2.0 - The timestamp is returned as a timezone-aware ``datetime`` - in UTC rather than a naive ``datetime`` assumed to be UTC. - """ - try: - result = super().unsign(signed_value) - sig_error = None - except BadSignature as e: - sig_error = e - result = e.payload or b"" - - sep = want_bytes(self.sep) - - # If there is no timestamp in the result there is something - # seriously wrong. In case there was a signature error, we raise - # that one directly, otherwise we have a weird situation in - # which we shouldn't have come except someone uses a time-based - # serializer on non-timestamp data, so catch that. - if sep not in result: - if sig_error: - raise sig_error - - raise BadTimeSignature("timestamp missing", payload=result) - - value, ts_bytes = result.rsplit(sep, 1) - ts_int: int | None = None - ts_dt: datetime | None = None - - try: - ts_int = bytes_to_int(base64_decode(ts_bytes)) - except Exception: - pass - - # Signature is *not* okay. Raise a proper error now that we have - # split the value and the timestamp. - if sig_error is not None: - if ts_int is not None: - try: - ts_dt = self.timestamp_to_datetime(ts_int) - except (ValueError, OSError, OverflowError) as exc: - # Windows raises OSError - # 32-bit raises OverflowError - raise BadTimeSignature( - "Malformed timestamp", payload=value - ) from exc - - raise BadTimeSignature(str(sig_error), payload=value, date_signed=ts_dt) - - # Signature was okay but the timestamp is actually not there or - # malformed. Should not happen, but we handle it anyway. 
- if ts_int is None: - raise BadTimeSignature("Malformed timestamp", payload=value) - - # Check timestamp is not older than max_age - if max_age is not None: - age = self.get_timestamp() - ts_int - - if age > max_age: - raise SignatureExpired( - f"Signature age {age} > {max_age} seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if age < 0: - raise SignatureExpired( - f"Signature age {age} < 0 seconds", - payload=value, - date_signed=self.timestamp_to_datetime(ts_int), - ) - - if return_timestamp: - return value, self.timestamp_to_datetime(ts_int) - - return value - - def validate(self, signed_value: str | bytes, max_age: int | None = None) -> bool: - """Only validates the given signed value. Returns ``True`` if - the signature exists and is valid.""" - try: - self.unsign(signed_value, max_age=max_age) - return True - except BadSignature: - return False - - -class TimedSerializer(Serializer[_TSerialized]): - """Uses :class:`TimestampSigner` instead of the default - :class:`.Signer`. - """ - - default_signer: type[TimestampSigner] = TimestampSigner - - def iter_unsigners( - self, salt: str | bytes | None = None - ) -> cabc.Iterator[TimestampSigner]: - return t.cast("cabc.Iterator[TimestampSigner]", super().iter_unsigners(salt)) - - # TODO: Signature is incompatible because parameters were added - # before salt. - - def loads( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - return_timestamp: bool = False, - salt: str | bytes | None = None, - ) -> t.Any: - """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the - signature validation fails. If a ``max_age`` is provided it will - ensure the signature is not older than that time in seconds. In - case the signature is outdated, :exc:`.SignatureExpired` is - raised. All arguments are forwarded to the signer's - :meth:`~TimestampSigner.unsign` method. - """ - s = want_bytes(s) - last_exception = None - - for signer in self.iter_unsigners(salt): - try: - base64d, timestamp = signer.unsign( - s, max_age=max_age, return_timestamp=True - ) - payload = self.load_payload(base64d) - - if return_timestamp: - return payload, timestamp - - return payload - except SignatureExpired: - # The signature was unsigned successfully but was - # expired. Do not try the next signer. - raise - except BadSignature as err: - last_exception = err - - raise t.cast(BadSignature, last_exception) - - def loads_unsafe( # type: ignore[override] - self, - s: str | bytes, - max_age: int | None = None, - salt: str | bytes | None = None, - ) -> tuple[bool, t.Any]: - return self._loads_unsafe_impl(s, salt, load_kwargs={"max_age": max_age}) diff --git a/myenv/Lib/site-packages/itsdangerous/url_safe.py b/myenv/Lib/site-packages/itsdangerous/url_safe.py deleted file mode 100644 index 56a0793..0000000 --- a/myenv/Lib/site-packages/itsdangerous/url_safe.py +++ /dev/null @@ -1,83 +0,0 @@ -from __future__ import annotations - -import typing as t -import zlib - -from ._json import _CompactJSON -from .encoding import base64_decode -from .encoding import base64_encode -from .exc import BadPayload -from .serializer import _PDataSerializer -from .serializer import Serializer -from .timed import TimedSerializer - - -class URLSafeSerializerMixin(Serializer[str]): - """Mixed in with a regular serializer it will attempt to zlib - compress the string to make it shorter if necessary. It will also - base64 encode the string so that it can safely be placed in a URL. 
- """ - - default_serializer: _PDataSerializer[str] = _CompactJSON - - def load_payload( - self, - payload: bytes, - *args: t.Any, - serializer: t.Any | None = None, - **kwargs: t.Any, - ) -> t.Any: - decompress = False - - if payload.startswith(b"."): - payload = payload[1:] - decompress = True - - try: - json = base64_decode(payload) - except Exception as e: - raise BadPayload( - "Could not base64 decode the payload because of an exception", - original_error=e, - ) from e - - if decompress: - try: - json = zlib.decompress(json) - except Exception as e: - raise BadPayload( - "Could not zlib decompress the payload before decoding the payload", - original_error=e, - ) from e - - return super().load_payload(json, *args, **kwargs) - - def dump_payload(self, obj: t.Any) -> bytes: - json = super().dump_payload(obj) - is_compressed = False - compressed = zlib.compress(json) - - if len(compressed) < (len(json) - 1): - json = compressed - is_compressed = True - - base64d = base64_encode(json) - - if is_compressed: - base64d = b"." + base64d - - return base64d - - -class URLSafeSerializer(URLSafeSerializerMixin, Serializer[str]): - """Works like :class:`.Serializer` but dumps and loads into a URL - safe string consisting of the upper and lowercase character of the - alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ - - -class URLSafeTimedSerializer(URLSafeSerializerMixin, TimedSerializer[str]): - """Works like :class:`.TimedSerializer` but dumps and loads into a - URL safe string consisting of the upper and lowercase character of - the alphabet as well as ``'_'``, ``'-'`` and ``'.'``. - """ diff --git a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/INSTALLER b/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/INSTALLER deleted file mode 100644 index a1b589e..0000000 --- a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/INSTALLER +++ /dev/null @@ -1 +0,0 @@ -pip diff --git a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/LICENSE.txt b/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/LICENSE.txt deleted file mode 100644 index c37cae4..0000000 --- a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/LICENSE.txt +++ /dev/null @@ -1,28 +0,0 @@ -Copyright 2007 Pallets - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - -1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its - contributors may be used to endorse or promote products derived from - this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A -PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED -TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/METADATA b/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/METADATA deleted file mode 100644 index 265cc32..0000000 --- a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/METADATA +++ /dev/null @@ -1,76 +0,0 @@ -Metadata-Version: 2.1 -Name: Jinja2 -Version: 3.1.4 -Summary: A very fast and expressive template engine. -Maintainer-email: Pallets -Requires-Python: >=3.7 -Description-Content-Type: text/markdown -Classifier: Development Status :: 5 - Production/Stable -Classifier: Environment :: Web Environment -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python -Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content -Classifier: Topic :: Text Processing :: Markup :: HTML -Classifier: Typing :: Typed -Requires-Dist: MarkupSafe>=2.0 -Requires-Dist: Babel>=2.7 ; extra == "i18n" -Project-URL: Changes, https://jinja.palletsprojects.com/changes/ -Project-URL: Chat, https://discord.gg/pallets -Project-URL: Documentation, https://jinja.palletsprojects.com/ -Project-URL: Donate, https://palletsprojects.com/donate -Project-URL: Source, https://github.com/pallets/jinja/ -Provides-Extra: i18n - -# Jinja - -Jinja is a fast, expressive, extensible templating engine. Special -placeholders in the template allow writing code similar to Python -syntax. Then the template is passed data to render the final document. - -It includes: - -- Template inheritance and inclusion. -- Define and import macros within templates. -- HTML templates can use autoescaping to prevent XSS from untrusted - user input. -- A sandboxed environment can safely render untrusted templates. -- AsyncIO support for generating templates and calling async - functions. -- I18N support with Babel. -- Templates are compiled to optimized Python code just-in-time and - cached, or can be compiled ahead-of-time. -- Exceptions point to the correct line in templates to make debugging - easier. -- Extensible filters, tests, functions, and even syntax. - -Jinja's philosophy is that while application logic belongs in Python if -possible, it shouldn't make the template designer's job difficult by -restricting functionality too much. - - -## In A Nutshell - -.. code-block:: jinja - - {% extends "base.html" %} - {% block title %}Members{% endblock %} - {% block content %} - - {% endblock %} - - -## Donate - -The Pallets organization develops and supports Jinja and other popular -packages. In order to grow the community of contributors and users, and -allow the maintainers to devote more time to the projects, [please -donate today][]. 
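The nutshell template above shows the designer-facing syntax; on the Python side it is paired with an `Environment`. A minimal sketch, where the inline template and names are illustrative:

```python
from jinja2 import DictLoader, Environment, select_autoescape

env = Environment(
    loader=DictLoader({"hello.html": "Hello {{ name }}!"}),  # illustrative template
    autoescape=select_autoescape(),  # enables escaping for .html/.xml templates
)

# Autoescaping renders the untrusted value as "Hello &lt;World&gt;!".
print(env.get_template("hello.html").render(name="<World>"))
```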
- -[please donate today]: https://palletsprojects.com/donate - diff --git a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/RECORD b/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/RECORD deleted file mode 100644 index b1a27b1..0000000 --- a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/RECORD +++ /dev/null @@ -1,57 +0,0 @@ -jinja2-3.1.4.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 -jinja2-3.1.4.dist-info/LICENSE.txt,sha256=O0nc7kEF6ze6wQ-vG-JgQI_oXSUrjp3y4JefweCUQ3s,1475 -jinja2-3.1.4.dist-info/METADATA,sha256=R_brzpPQVBvpGcsm-WbrtgotO7suQ1D0F-qkhTzeEfY,2640 -jinja2-3.1.4.dist-info/RECORD,, -jinja2-3.1.4.dist-info/WHEEL,sha256=EZbGkh7Ie4PoZfRQ8I0ZuP9VklN_TvcZ6DSE5Uar4z4,81 -jinja2-3.1.4.dist-info/entry_points.txt,sha256=OL85gYU1eD8cuPlikifFngXpeBjaxl6rIJ8KkC_3r-I,58 -jinja2/__init__.py,sha256=wIl45IM20KGw-kfr7jJhaBxxX5g4-kihlBYjxopX7Pw,1928 -jinja2/__pycache__/__init__.cpython-312.pyc,, -jinja2/__pycache__/_identifier.cpython-312.pyc,, -jinja2/__pycache__/async_utils.cpython-312.pyc,, -jinja2/__pycache__/bccache.cpython-312.pyc,, -jinja2/__pycache__/compiler.cpython-312.pyc,, -jinja2/__pycache__/constants.cpython-312.pyc,, -jinja2/__pycache__/debug.cpython-312.pyc,, -jinja2/__pycache__/defaults.cpython-312.pyc,, -jinja2/__pycache__/environment.cpython-312.pyc,, -jinja2/__pycache__/exceptions.cpython-312.pyc,, -jinja2/__pycache__/ext.cpython-312.pyc,, -jinja2/__pycache__/filters.cpython-312.pyc,, -jinja2/__pycache__/idtracking.cpython-312.pyc,, -jinja2/__pycache__/lexer.cpython-312.pyc,, -jinja2/__pycache__/loaders.cpython-312.pyc,, -jinja2/__pycache__/meta.cpython-312.pyc,, -jinja2/__pycache__/nativetypes.cpython-312.pyc,, -jinja2/__pycache__/nodes.cpython-312.pyc,, -jinja2/__pycache__/optimizer.cpython-312.pyc,, -jinja2/__pycache__/parser.cpython-312.pyc,, -jinja2/__pycache__/runtime.cpython-312.pyc,, -jinja2/__pycache__/sandbox.cpython-312.pyc,, -jinja2/__pycache__/tests.cpython-312.pyc,, -jinja2/__pycache__/utils.cpython-312.pyc,, -jinja2/__pycache__/visitor.cpython-312.pyc,, -jinja2/_identifier.py,sha256=_zYctNKzRqlk_murTNlzrju1FFJL7Va_Ijqqd7ii2lU,1958 -jinja2/async_utils.py,sha256=JXKWCAXmTx0iZB4-hAsF50vgjxw_RJTjiLOlGGTBso0,2477 -jinja2/bccache.py,sha256=gh0qs9rulnXo0PhX5jTJy2UHzI8wFnQ63o_vw7nhzRg,14061 -jinja2/compiler.py,sha256=dpV-n6_iQUP4uSwlXwGUavJmwjvXdyxKzJ-AonFjPBk,72271 -jinja2/constants.py,sha256=GMoFydBF_kdpaRKPoM5cl5MviquVRLVyZtfp5-16jg0,1433 -jinja2/debug.py,sha256=iWJ432RadxJNnaMOPrjIDInz50UEgni3_HKuFXi2vuQ,6299 -jinja2/defaults.py,sha256=boBcSw78h-lp20YbaXSJsqkAI2uN_mD_TtCydpeq5wU,1267 -jinja2/environment.py,sha256=xhFkmxO0CESA76Ki5tz4XWq9yzGu-t0p93JCCVBVNps,61538 -jinja2/exceptions.py,sha256=ioHeHrWwCWNaXX1inHmHVblvc4haO7AXsjCp3GfWvx0,5071 -jinja2/ext.py,sha256=igsBH7c6C0byHaOtMbE-ugpt4GjLGgR-ywskyXtKgq8,31877 -jinja2/filters.py,sha256=bKeqjFjjz88TkHVLSyyMIEB75CzAN6b3Airgx0phJDg,54611 -jinja2/idtracking.py,sha256=GfNmadir4oDALVxzn3DL9YInhJDr69ebXeA2ygfuCGA,10704 -jinja2/lexer.py,sha256=xnWWXhPndHFsoqzpc5VTjheDE9JuKk9MUo9DZkrM8Os,29754 -jinja2/loaders.py,sha256=ru0GIWHo5KiHJi7_MoI_LvGDoBBvP6rd0hiC1ReaTwk,23167 -jinja2/meta.py,sha256=OTDPkaFvU2Hgvx-6akz7154F8BIWaRmvJcBFvwopHww,4397 -jinja2/nativetypes.py,sha256=7GIGALVJgdyL80oZJdQUaUfwSt5q2lSSZbXt0dNf_M4,4210 -jinja2/nodes.py,sha256=m1Duzcr6qhZI8JQ6VyJgUNinjAf5bQzijSmDnMsvUx8,34579 -jinja2/optimizer.py,sha256=rJnCRlQ7pZsEEmMhsQDgC_pKyDHxP5TPS6zVPGsgcu8,1651 -jinja2/parser.py,sha256=DV1iF1FR2Rsaj_5zl8rmx7j6Bj4S8iLHoYsvJ0bfEis,39890 
-jinja2/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 -jinja2/runtime.py,sha256=POXT3tKNKJRENx2CymwUsOOXH2JwGPjW702njB5__cQ,33435 -jinja2/sandbox.py,sha256=TJjBNS9qRJ2ZgBMWdAgRBpyDLOHea2kT-2mk4PrjYx0,14616 -jinja2/tests.py,sha256=VLsBhVFnWg-PxSBz1MhRnNWgP1ovXk3neO1FLQMeC9Q,5926 -jinja2/utils.py,sha256=nV7IpWLvRCMyHW1irBAK8CIPAnOFfkb2ukggDBjbBEY,23952 -jinja2/visitor.py,sha256=EcnL1PIwf_4RVCOMxsRNuR8AXHbS1qfAdMOE2ngKJz4,3557 diff --git a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/WHEEL b/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/WHEEL deleted file mode 100644 index 3b5e64b..0000000 --- a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/WHEEL +++ /dev/null @@ -1,4 +0,0 @@ -Wheel-Version: 1.0 -Generator: flit 3.9.0 -Root-Is-Purelib: true -Tag: py3-none-any diff --git a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/entry_points.txt b/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/entry_points.txt deleted file mode 100644 index abc3eae..0000000 --- a/myenv/Lib/site-packages/jinja2-3.1.4.dist-info/entry_points.txt +++ /dev/null @@ -1,3 +0,0 @@ -[babel.extractors] -jinja2=jinja2.ext:babel_extract[i18n] - diff --git a/myenv/Lib/site-packages/jinja2/__init__.py b/myenv/Lib/site-packages/jinja2/__init__.py deleted file mode 100644 index 2f0b5b2..0000000 --- a/myenv/Lib/site-packages/jinja2/__init__.py +++ /dev/null @@ -1,38 +0,0 @@ -"""Jinja is a template engine written in pure Python. It provides a -non-XML syntax that supports inline expressions and an optional -sandboxed environment. -""" - -from .bccache import BytecodeCache as BytecodeCache -from .bccache import FileSystemBytecodeCache as FileSystemBytecodeCache -from .bccache import MemcachedBytecodeCache as MemcachedBytecodeCache -from .environment import Environment as Environment -from .environment import Template as Template -from .exceptions import TemplateAssertionError as TemplateAssertionError -from .exceptions import TemplateError as TemplateError -from .exceptions import TemplateNotFound as TemplateNotFound -from .exceptions import TemplateRuntimeError as TemplateRuntimeError -from .exceptions import TemplatesNotFound as TemplatesNotFound -from .exceptions import TemplateSyntaxError as TemplateSyntaxError -from .exceptions import UndefinedError as UndefinedError -from .loaders import BaseLoader as BaseLoader -from .loaders import ChoiceLoader as ChoiceLoader -from .loaders import DictLoader as DictLoader -from .loaders import FileSystemLoader as FileSystemLoader -from .loaders import FunctionLoader as FunctionLoader -from .loaders import ModuleLoader as ModuleLoader -from .loaders import PackageLoader as PackageLoader -from .loaders import PrefixLoader as PrefixLoader -from .runtime import ChainableUndefined as ChainableUndefined -from .runtime import DebugUndefined as DebugUndefined -from .runtime import make_logging_undefined as make_logging_undefined -from .runtime import StrictUndefined as StrictUndefined -from .runtime import Undefined as Undefined -from .utils import clear_caches as clear_caches -from .utils import is_undefined as is_undefined -from .utils import pass_context as pass_context -from .utils import pass_environment as pass_environment -from .utils import pass_eval_context as pass_eval_context -from .utils import select_autoescape as select_autoescape - -__version__ = "3.1.4" diff --git a/myenv/Lib/site-packages/jinja2/_identifier.py b/myenv/Lib/site-packages/jinja2/_identifier.py deleted file mode 100644 index 928c150..0000000 --- a/myenv/Lib/site-packages/jinja2/_identifier.py +++ /dev/null 
@@ -1,6 +0,0 @@ -import re - -# generated by scripts/generate_identifier_pattern.py -pattern = re.compile( - r"[\w·̀-ͯ·҃-֑҇-ׇֽֿׁׂׅׄؐ-ًؚ-ٰٟۖ-ۜ۟-۪ۤۧۨ-ܑۭܰ-݊ަ-ް߫-߽߳ࠖ-࠙ࠛ-ࠣࠥ-ࠧࠩ-࡙࠭-࡛࣓-ࣣ࣡-ःऺ-़ा-ॏ॑-ॗॢॣঁ-ঃ়া-ৄেৈো-্ৗৢৣ৾ਁ-ਃ਼ਾ-ੂੇੈੋ-੍ੑੰੱੵઁ-ઃ઼ા-ૅે-ૉો-્ૢૣૺ-૿ଁ-ଃ଼ା-ୄେୈୋ-୍ୖୗୢୣஂா-ூெ-ைொ-்ௗఀ-ఄా-ౄె-ైొ-్ౕౖౢౣಁ-ಃ಼ಾ-ೄೆ-ೈೊ-್ೕೖೢೣഀ-ഃ഻഼ാ-ൄെ-ൈൊ-്ൗൢൣංඃ්ා-ුූෘ-ෟෲෳัิ-ฺ็-๎ັິ-ູົຼ່-ໍ༹༘༙༵༷༾༿ཱ-྄྆྇ྍ-ྗྙ-ྼ࿆ါ-ှၖ-ၙၞ-ၠၢ-ၤၧ-ၭၱ-ၴႂ-ႍႏႚ-ႝ፝-፟ᜒ-᜔ᜲ-᜴ᝒᝓᝲᝳ឴-៓៝᠋-᠍ᢅᢆᢩᤠ-ᤫᤰ-᤻ᨗ-ᨛᩕ-ᩞ᩠-᩿᩼᪰-᪽ᬀ-ᬄ᬴-᭄᭫-᭳ᮀ-ᮂᮡ-ᮭ᯦-᯳ᰤ-᰷᳐-᳔᳒-᳨᳭ᳲ-᳴᳷-᳹᷀-᷹᷻-᷿‿⁀⁔⃐-⃥⃜⃡-⃰℘℮⳯-⵿⳱ⷠ-〪ⷿ-゙゚〯꙯ꙴ-꙽ꚞꚟ꛰꛱ꠂ꠆ꠋꠣ-ꠧꢀꢁꢴ-ꣅ꣠-꣱ꣿꤦ-꤭ꥇ-꥓ꦀ-ꦃ꦳-꧀ꧥꨩ-ꨶꩃꩌꩍꩻ-ꩽꪰꪲ-ꪴꪷꪸꪾ꪿꫁ꫫ-ꫯꫵ꫶ꯣ-ꯪ꯬꯭ﬞ︀-️︠-︯︳︴﹍-﹏_𐇽𐋠𐍶-𐍺𐨁-𐨃𐨅𐨆𐨌-𐨏𐨸-𐨿𐨺𐫦𐫥𐴤-𐽆𐴧-𐽐𑀀-𑀂𑀸-𑁆𑁿-𑂂𑂰-𑂺𑄀-𑄂𑄧-𑄴𑅅𑅆𑅳𑆀-𑆂𑆳-𑇀𑇉-𑇌𑈬-𑈷𑈾𑋟-𑋪𑌀-𑌃𑌻𑌼𑌾-𑍄𑍇𑍈𑍋-𑍍𑍗𑍢𑍣𑍦-𑍬𑍰-𑍴𑐵-𑑆𑑞𑒰-𑓃𑖯-𑖵𑖸-𑗀𑗜𑗝𑘰-𑙀𑚫-𑚷𑜝-𑜫𑠬-𑠺𑨁-𑨊𑨳-𑨹𑨻-𑨾𑩇𑩑-𑩛𑪊-𑪙𑰯-𑰶𑰸-𑰿𑲒-𑲧𑲩-𑲶𑴱-𑴶𑴺𑴼𑴽𑴿-𑵅𑵇𑶊-𑶎𑶐𑶑𑶓-𑶗𑻳-𑻶𖫰-𖫴𖬰-𖬶𖽑-𖽾𖾏-𖾒𛲝𛲞𝅥-𝅩𝅭-𝅲𝅻-𝆂𝆅-𝆋𝆪-𝆭𝉂-𝉄𝨀-𝨶𝨻-𝩬𝩵𝪄𝪛-𝪟𝪡-𝪯𞀀-𞀆𞀈-𞀘𞀛-𞀡𞀣𞀤𞀦-𞣐𞀪-𞣖𞥄-𞥊󠄀-󠇯]+" # noqa: B950 -) diff --git a/myenv/Lib/site-packages/jinja2/async_utils.py b/myenv/Lib/site-packages/jinja2/async_utils.py deleted file mode 100644 index e65219e..0000000 --- a/myenv/Lib/site-packages/jinja2/async_utils.py +++ /dev/null @@ -1,84 +0,0 @@ -import inspect -import typing as t -from functools import WRAPPER_ASSIGNMENTS -from functools import wraps - -from .utils import _PassArg -from .utils import pass_eval_context - -V = t.TypeVar("V") - - -def async_variant(normal_func): # type: ignore - def decorator(async_func): # type: ignore - pass_arg = _PassArg.from_obj(normal_func) - need_eval_context = pass_arg is None - - if pass_arg is _PassArg.environment: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].is_async) - - else: - - def is_async(args: t.Any) -> bool: - return t.cast(bool, args[0].environment.is_async) - - # Take the doc and annotations from the sync function, but the - # name from the async function. Pallets-Sphinx-Themes - # build_function_directive expects __wrapped__ to point to the - # sync function. - async_func_attrs = ("__module__", "__name__", "__qualname__") - normal_func_attrs = tuple(set(WRAPPER_ASSIGNMENTS).difference(async_func_attrs)) - - @wraps(normal_func, assigned=normal_func_attrs) - @wraps(async_func, assigned=async_func_attrs, updated=()) - def wrapper(*args, **kwargs): # type: ignore - b = is_async(args) - - if need_eval_context: - args = args[1:] - - if b: - return async_func(*args, **kwargs) - - return normal_func(*args, **kwargs) - - if need_eval_context: - wrapper = pass_eval_context(wrapper) - - wrapper.jinja_async_variant = True # type: ignore[attr-defined] - return wrapper - - return decorator - - -_common_primitives = {int, float, bool, str, list, dict, tuple, type(None)} - - -async def auto_await(value: t.Union[t.Awaitable["V"], "V"]) -> "V": - # Avoid a costly call to isawaitable - if type(value) in _common_primitives: - return t.cast("V", value) - - if inspect.isawaitable(value): - return await t.cast("t.Awaitable[V]", value) - - return t.cast("V", value) - - -async def auto_aiter( - iterable: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> "t.AsyncIterator[V]": - if hasattr(iterable, "__aiter__"): - async for item in t.cast("t.AsyncIterable[V]", iterable): - yield item - else: - for item in iterable: - yield item - - -async def auto_to_list( - value: "t.Union[t.AsyncIterable[V], t.Iterable[V]]", -) -> t.List["V"]: - return [x async for x in auto_aiter(value)] diff --git a/myenv/Lib/site-packages/jinja2/bccache.py b/myenv/Lib/site-packages/jinja2/bccache.py deleted file mode 100644 index ada8b09..0000000 --- a/myenv/Lib/site-packages/jinja2/bccache.py +++ /dev/null @@ -1,408 +0,0 @@ -"""The optional bytecode cache system. 
This is useful if you have very -complex template situations and the compilation of all those templates -slows down your application too much. - -Situations where this is useful are often forking web applications that -are initialized on the first request. -""" - -import errno -import fnmatch -import marshal -import os -import pickle -import stat -import sys -import tempfile -import typing as t -from hashlib import sha1 -from io import BytesIO -from types import CodeType - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - - class _MemcachedClient(te.Protocol): - def get(self, key: str) -> bytes: ... - - def set( - self, key: str, value: bytes, timeout: t.Optional[int] = None - ) -> None: ... - - -bc_version = 5 -# Magic bytes to identify Jinja bytecode cache files. Contains the -# Python major and minor version to avoid loading incompatible bytecode -# if a project upgrades its Python version. -bc_magic = ( - b"j2" - + pickle.dumps(bc_version, 2) - + pickle.dumps((sys.version_info[0] << 24) | sys.version_info[1], 2) -) - - -class Bucket: - """Buckets are used to store the bytecode for one template. It's created - and initialized by the bytecode cache and passed to the loading functions. - - The buckets get an internal checksum from the cache assigned and use this - to automatically reject outdated cache material. Individual bytecode - cache subclasses don't have to care about cache invalidation. - """ - - def __init__(self, environment: "Environment", key: str, checksum: str) -> None: - self.environment = environment - self.key = key - self.checksum = checksum - self.reset() - - def reset(self) -> None: - """Resets the bucket (unloads the bytecode).""" - self.code: t.Optional[CodeType] = None - - def load_bytecode(self, f: t.BinaryIO) -> None: - """Loads bytecode from a file or file like object.""" - # make sure the magic header is correct - magic = f.read(len(bc_magic)) - if magic != bc_magic: - self.reset() - return - # the source code of the file changed, we need to reload - checksum = pickle.load(f) - if self.checksum != checksum: - self.reset() - return - # if marshal_load fails then we need to reload - try: - self.code = marshal.load(f) - except (EOFError, ValueError, TypeError): - self.reset() - return - - def write_bytecode(self, f: t.IO[bytes]) -> None: - """Dump the bytecode into the file or file like object passed.""" - if self.code is None: - raise TypeError("can't write empty bucket") - f.write(bc_magic) - pickle.dump(self.checksum, f, 2) - marshal.dump(self.code, f) - - def bytecode_from_string(self, string: bytes) -> None: - """Load bytecode from bytes.""" - self.load_bytecode(BytesIO(string)) - - def bytecode_to_string(self) -> bytes: - """Return the bytecode as bytes.""" - out = BytesIO() - self.write_bytecode(out) - return out.getvalue() - - -class BytecodeCache: - """To implement your own bytecode cache you have to subclass this class - and override :meth:`load_bytecode` and :meth:`dump_bytecode`. Both of - these methods are passed a :class:`~jinja2.bccache.Bucket`. 
- - A very basic bytecode cache that saves the bytecode on the file system:: - - from os import path - - class MyCache(BytecodeCache): - - def __init__(self, directory): - self.directory = directory - - def load_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - if path.exists(filename): - with open(filename, 'rb') as f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket): - filename = path.join(self.directory, bucket.key) - with open(filename, 'wb') as f: - bucket.write_bytecode(f) - - A more advanced version of a filesystem based bytecode cache is part of - Jinja. - """ - - def load_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to load bytecode into a - bucket. If they are not able to find code in the cache for the - bucket, it must not do anything. - """ - raise NotImplementedError() - - def dump_bytecode(self, bucket: Bucket) -> None: - """Subclasses have to override this method to write the bytecode - from a bucket back to the cache. If it unable to do so it must not - fail silently but raise an exception. - """ - raise NotImplementedError() - - def clear(self) -> None: - """Clears the cache. This method is not used by Jinja but should be - implemented to allow applications to clear the bytecode cache used - by a particular environment. - """ - - def get_cache_key( - self, name: str, filename: t.Optional[t.Union[str]] = None - ) -> str: - """Returns the unique hash key for this template name.""" - hash = sha1(name.encode("utf-8")) - - if filename is not None: - hash.update(f"|{filename}".encode()) - - return hash.hexdigest() - - def get_source_checksum(self, source: str) -> str: - """Returns a checksum for the source.""" - return sha1(source.encode("utf-8")).hexdigest() - - def get_bucket( - self, - environment: "Environment", - name: str, - filename: t.Optional[str], - source: str, - ) -> Bucket: - """Return a cache bucket for the given template. All arguments are - mandatory but filename may be `None`. - """ - key = self.get_cache_key(name, filename) - checksum = self.get_source_checksum(source) - bucket = Bucket(environment, key, checksum) - self.load_bytecode(bucket) - return bucket - - def set_bucket(self, bucket: Bucket) -> None: - """Put the bucket into the cache.""" - self.dump_bytecode(bucket) - - -class FileSystemBytecodeCache(BytecodeCache): - """A bytecode cache that stores bytecode on the filesystem. It accepts - two arguments: The directory where the cache items are stored and a - pattern string that is used to build the filename. - - If no directory is specified a default cache directory is selected. On - Windows the user's temp directory is used, on UNIX systems a directory - is created for the user in the system temp directory. - - The pattern can be used to have multiple separate caches operate on the - same directory. The default pattern is ``'__jinja2_%s.cache'``. ``%s`` - is replaced with the cache key. - - >>> bcc = FileSystemBytecodeCache('/tmp/jinja_cache', '%s.cache') - - This bytecode cache supports clearing of the cache using the clear method. - """ - - def __init__( - self, directory: t.Optional[str] = None, pattern: str = "__jinja2_%s.cache" - ) -> None: - if directory is None: - directory = self._get_default_cache_dir() - self.directory = directory - self.pattern = pattern - - def _get_default_cache_dir(self) -> str: - def _unsafe_dir() -> "te.NoReturn": - raise RuntimeError( - "Cannot determine safe temp directory. You " - "need to explicitly provide one." 
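# [editor's illustrative sketch, not part of the patch] A bytecode cache such
# as the FileSystemBytecodeCache above is attached through Environment's
# ``bytecode_cache`` argument (the directory paths here are hypothetical):
from jinja2 import Environment, FileSystemBytecodeCache, FileSystemLoader

env = Environment(
    loader=FileSystemLoader("templates"),
    bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache"),
)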
- ) - - tmpdir = tempfile.gettempdir() - - # On windows the temporary directory is used specific unless - # explicitly forced otherwise. We can just use that. - if os.name == "nt": - return tmpdir - if not hasattr(os, "getuid"): - _unsafe_dir() - - dirname = f"_jinja2-cache-{os.getuid()}" - actual_dir = os.path.join(tmpdir, dirname) - - try: - os.mkdir(actual_dir, stat.S_IRWXU) - except OSError as e: - if e.errno != errno.EEXIST: - raise - try: - os.chmod(actual_dir, stat.S_IRWXU) - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - except OSError as e: - if e.errno != errno.EEXIST: - raise - - actual_dir_stat = os.lstat(actual_dir) - if ( - actual_dir_stat.st_uid != os.getuid() - or not stat.S_ISDIR(actual_dir_stat.st_mode) - or stat.S_IMODE(actual_dir_stat.st_mode) != stat.S_IRWXU - ): - _unsafe_dir() - - return actual_dir - - def _get_cache_filename(self, bucket: Bucket) -> str: - return os.path.join(self.directory, self.pattern % (bucket.key,)) - - def load_bytecode(self, bucket: Bucket) -> None: - filename = self._get_cache_filename(bucket) - - # Don't test for existence before opening the file, since the - # file could disappear after the test before the open. - try: - f = open(filename, "rb") - except (FileNotFoundError, IsADirectoryError, PermissionError): - # PermissionError can occur on Windows when an operation is - # in progress, such as calling clear(). - return - - with f: - bucket.load_bytecode(f) - - def dump_bytecode(self, bucket: Bucket) -> None: - # Write to a temporary file, then rename to the real name after - # writing. This avoids another process reading the file before - # it is fully written. - name = self._get_cache_filename(bucket) - f = tempfile.NamedTemporaryFile( - mode="wb", - dir=os.path.dirname(name), - prefix=os.path.basename(name), - suffix=".tmp", - delete=False, - ) - - def remove_silent() -> None: - try: - os.remove(f.name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - pass - - try: - with f: - bucket.write_bytecode(f) - except BaseException: - remove_silent() - raise - - try: - os.replace(f.name, name) - except OSError: - # Another process may have called clear(). On Windows, - # another program may be holding the file open. - remove_silent() - except BaseException: - remove_silent() - raise - - def clear(self) -> None: - # imported lazily here because google app-engine doesn't support - # write access on the file system and the function does not exist - # normally. - from os import remove - - files = fnmatch.filter(os.listdir(self.directory), self.pattern % ("*",)) - for filename in files: - try: - remove(os.path.join(self.directory, filename)) - except OSError: - pass - - -class MemcachedBytecodeCache(BytecodeCache): - """This class implements a bytecode cache that uses a memcache cache for - storing the information. It does not enforce a specific memcache library - (tummy's memcache or cmemcache) but will accept any class that provides - the minimal interface required. - - Libraries compatible with this class: - - - `cachelib `_ - - `python-memcached `_ - - (Unfortunately the django cache interface is not compatible because it - does not support storing binary data, only text. You can however pass - the underlying cache client to the bytecode cache which is available - as `django.core.cache.cache._client`.) 
- - The minimal interface for the client passed to the constructor is this: - - .. class:: MinimalClientInterface - - .. method:: set(key, value[, timeout]) - - Stores the bytecode in the cache. `value` is a string and - `timeout` the timeout of the key. If timeout is not provided - a default timeout or no timeout should be assumed, if it's - provided it's an integer with the number of seconds the cache - item should exist. - - .. method:: get(key) - - Returns the value for the cache key. If the item does not - exist in the cache the return value must be `None`. - - The other arguments to the constructor are the prefix for all keys that - is added before the actual cache key and the timeout for the bytecode in - the cache system. We recommend a high (or no) timeout. - - This bytecode cache does not support clearing of used items in the cache. - The clear method is a no-operation function. - - .. versionadded:: 2.7 - Added support for ignoring memcache errors through the - `ignore_memcache_errors` parameter. - """ - - def __init__( - self, - client: "_MemcachedClient", - prefix: str = "jinja2/bytecode/", - timeout: t.Optional[int] = None, - ignore_memcache_errors: bool = True, - ): - self.client = client - self.prefix = prefix - self.timeout = timeout - self.ignore_memcache_errors = ignore_memcache_errors - - def load_bytecode(self, bucket: Bucket) -> None: - try: - code = self.client.get(self.prefix + bucket.key) - except Exception: - if not self.ignore_memcache_errors: - raise - else: - bucket.bytecode_from_string(code) - - def dump_bytecode(self, bucket: Bucket) -> None: - key = self.prefix + bucket.key - value = bucket.bytecode_to_string() - - try: - if self.timeout is not None: - self.client.set(key, value, self.timeout) - else: - self.client.set(key, value) - except Exception: - if not self.ignore_memcache_errors: - raise diff --git a/myenv/Lib/site-packages/jinja2/compiler.py b/myenv/Lib/site-packages/jinja2/compiler.py deleted file mode 100644 index 2740717..0000000 --- a/myenv/Lib/site-packages/jinja2/compiler.py +++ /dev/null @@ -1,1960 +0,0 @@ -"""Compiles nodes from the parser into Python code.""" - -import typing as t -from contextlib import contextmanager -from functools import update_wrapper -from io import StringIO -from itertools import chain -from keyword import iskeyword as is_python_keyword - -from markupsafe import escape -from markupsafe import Markup - -from . 
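# [editor's illustrative sketch, not part of the patch] MemcachedBytecodeCache
# only requires the minimal get/set client interface documented above, so an
# in-memory stand-in is enough to exercise it (DictClient is made up):
import typing as t

class DictClient:
    def __init__(self) -> None:
        self._data: t.Dict[str, bytes] = {}

    def get(self, key: str) -> t.Optional[bytes]:
        return self._data.get(key)  # None when absent, as the docs require

    def set(self, key: str, value: bytes, timeout: t.Optional[int] = None) -> None:
        self._data[key] = value

# e.g. MemcachedBytecodeCache(DictClient(), prefix="jinja2/bytecode/")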
import nodes -from .exceptions import TemplateAssertionError -from .idtracking import Symbols -from .idtracking import VAR_LOAD_ALIAS -from .idtracking import VAR_LOAD_PARAMETER -from .idtracking import VAR_LOAD_RESOLVE -from .idtracking import VAR_LOAD_UNDEFINED -from .nodes import EvalContext -from .optimizer import Optimizer -from .utils import _PassArg -from .utils import concat -from .visitor import NodeVisitor - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .environment import Environment - -F = t.TypeVar("F", bound=t.Callable[..., t.Any]) - -operators = { - "eq": "==", - "ne": "!=", - "gt": ">", - "gteq": ">=", - "lt": "<", - "lteq": "<=", - "in": "in", - "notin": "not in", -} - - -def optimizeconst(f: F) -> F: - def new_func( - self: "CodeGenerator", node: nodes.Expr, frame: "Frame", **kwargs: t.Any - ) -> t.Any: - # Only optimize if the frame is not volatile - if self.optimizer is not None and not frame.eval_ctx.volatile: - new_node = self.optimizer.visit(node, frame.eval_ctx) - - if new_node != node: - return self.visit(new_node, frame) - - return f(self, node, frame, **kwargs) - - return update_wrapper(t.cast(F, new_func), f) - - -def _make_binop(op: str) -> t.Callable[["CodeGenerator", nodes.BinExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.BinExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_binops # type: ignore - ): - self.write(f"environment.call_binop(context, {op!r}, ") - self.visit(node.left, frame) - self.write(", ") - self.visit(node.right, frame) - else: - self.write("(") - self.visit(node.left, frame) - self.write(f" {op} ") - self.visit(node.right, frame) - - self.write(")") - - return visitor - - -def _make_unop( - op: str, -) -> t.Callable[["CodeGenerator", nodes.UnaryExpr, "Frame"], None]: - @optimizeconst - def visitor(self: "CodeGenerator", node: nodes.UnaryExpr, frame: Frame) -> None: - if ( - self.environment.sandboxed and op in self.environment.intercepted_unops # type: ignore - ): - self.write(f"environment.call_unop(context, {op!r}, ") - self.visit(node.node, frame) - else: - self.write("(" + op) - self.visit(node.node, frame) - - self.write(")") - - return visitor - - -def generate( - node: nodes.Template, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, -) -> t.Optional[str]: - """Generate the python source for a node tree.""" - if not isinstance(node, nodes.Template): - raise TypeError("Can't compile non template nodes") - - generator = environment.code_generator_class( - environment, name, filename, stream, defer_init, optimized - ) - generator.visit(node) - - if stream is None: - return generator.stream.getvalue() # type: ignore - - return None - - -def has_safe_repr(value: t.Any) -> bool: - """Does the node have a safe representation?""" - if value is None or value is NotImplemented or value is Ellipsis: - return True - - if type(value) in {bool, int, float, complex, range, str, Markup}: - return True - - if type(value) in {tuple, list, set, frozenset}: - return all(has_safe_repr(v) for v in value) - - if type(value) is dict: # noqa E721 - return all(has_safe_repr(k) and has_safe_repr(v) for k, v in value.items()) - - return False - - -def find_undeclared( - nodes: t.Iterable[nodes.Node], names: t.Iterable[str] -) -> t.Set[str]: - """Check if the names passed are accessed undeclared. 
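# [editor's illustrative sketch, not part of the patch] ``generate()`` above
# emits the Python module for a template; Environment.compile(..., raw=True)
# returns that generated source so it can be inspected:
from jinja2 import Environment

src = Environment().compile("Hello {{ name }}!", raw=True)
print(src)  # shows the root() function produced by CodeGenerator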
The return value - is a set of all the undeclared names from the sequence of names found. - """ - visitor = UndeclaredNameVisitor(names) - try: - for node in nodes: - visitor.visit(node) - except VisitorExit: - pass - return visitor.undeclared - - -class MacroRef: - def __init__(self, node: t.Union[nodes.Macro, nodes.CallBlock]) -> None: - self.node = node - self.accesses_caller = False - self.accesses_kwargs = False - self.accesses_varargs = False - - -class Frame: - """Holds compile time information for us.""" - - def __init__( - self, - eval_ctx: EvalContext, - parent: t.Optional["Frame"] = None, - level: t.Optional[int] = None, - ) -> None: - self.eval_ctx = eval_ctx - - # the parent of this frame - self.parent = parent - - if parent is None: - self.symbols = Symbols(level=level) - - # in some dynamic inheritance situations the compiler needs to add - # write tests around output statements. - self.require_output_check = False - - # inside some tags we are using a buffer rather than yield statements. - # this for example affects {% filter %} or {% macro %}. If a frame - # is buffered this variable points to the name of the list used as - # buffer. - self.buffer: t.Optional[str] = None - - # the name of the block we're in, otherwise None. - self.block: t.Optional[str] = None - - else: - self.symbols = Symbols(parent.symbols, level=level) - self.require_output_check = parent.require_output_check - self.buffer = parent.buffer - self.block = parent.block - - # a toplevel frame is the root + soft frames such as if conditions. - self.toplevel = False - - # the root frame is basically just the outermost frame, so no if - # conditions. This information is used to optimize inheritance - # situations. - self.rootlevel = False - - # variables set inside of loops and blocks should not affect outer frames, - # but they still needs to be kept track of as part of the active context. - self.loop_frame = False - self.block_frame = False - - # track whether the frame is being used in an if-statement or conditional - # expression as it determines which errors should be raised during runtime - # or compile time. - self.soft_frame = False - - def copy(self) -> "Frame": - """Create a copy of the current one.""" - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.symbols = self.symbols.copy() - return rv - - def inner(self, isolated: bool = False) -> "Frame": - """Return an inner frame.""" - if isolated: - return Frame(self.eval_ctx, level=self.symbols.level + 1) - return Frame(self.eval_ctx, self) - - def soft(self) -> "Frame": - """Return a soft frame. A soft frame may not be modified as - standalone thing as it shares the resources with the frame it - was created of, but it's not a rootlevel frame any longer. - - This is only used to implement if-statements and conditional - expressions. 
- """ - rv = self.copy() - rv.rootlevel = False - rv.soft_frame = True - return rv - - __copy__ = copy - - -class VisitorExit(RuntimeError): - """Exception used by the `UndeclaredNameVisitor` to signal a stop.""" - - -class DependencyFinderVisitor(NodeVisitor): - """A visitor that collects filter and test calls.""" - - def __init__(self) -> None: - self.filters: t.Set[str] = set() - self.tests: t.Set[str] = set() - - def visit_Filter(self, node: nodes.Filter) -> None: - self.generic_visit(node) - self.filters.add(node.name) - - def visit_Test(self, node: nodes.Test) -> None: - self.generic_visit(node) - self.tests.add(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting at blocks.""" - - -class UndeclaredNameVisitor(NodeVisitor): - """A visitor that checks if a name is accessed without being - declared. This is different from the frame visitor as it will - not stop at closure frames. - """ - - def __init__(self, names: t.Iterable[str]) -> None: - self.names = set(names) - self.undeclared: t.Set[str] = set() - - def visit_Name(self, node: nodes.Name) -> None: - if node.ctx == "load" and node.name in self.names: - self.undeclared.add(node.name) - if self.undeclared == self.names: - raise VisitorExit() - else: - self.names.discard(node.name) - - def visit_Block(self, node: nodes.Block) -> None: - """Stop visiting a blocks.""" - - -class CompilerExit(Exception): - """Raised if the compiler encountered a situation where it just - doesn't make sense to further process the code. Any block that - raises such an exception is not further processed. - """ - - -class CodeGenerator(NodeVisitor): - def __init__( - self, - environment: "Environment", - name: t.Optional[str], - filename: t.Optional[str], - stream: t.Optional[t.TextIO] = None, - defer_init: bool = False, - optimized: bool = True, - ) -> None: - if stream is None: - stream = StringIO() - self.environment = environment - self.name = name - self.filename = filename - self.stream = stream - self.created_block_context = False - self.defer_init = defer_init - self.optimizer: t.Optional[Optimizer] = None - - if optimized: - self.optimizer = Optimizer(environment) - - # aliases for imports - self.import_aliases: t.Dict[str, str] = {} - - # a registry for all blocks. Because blocks are moved out - # into the global python scope they are registered here - self.blocks: t.Dict[str, nodes.Block] = {} - - # the number of extends statements so far - self.extends_so_far = 0 - - # some templates have a rootlevel extends. In this case we - # can safely assume that we're a child template and do some - # more optimizations. - self.has_known_extends = False - - # the current line number - self.code_lineno = 1 - - # registry of all filters and tests (global, not block local) - self.tests: t.Dict[str, str] = {} - self.filters: t.Dict[str, str] = {} - - # the debug information - self.debug_info: t.List[t.Tuple[int, int]] = [] - self._write_debug_info: t.Optional[int] = None - - # the number of new lines before the next write() - self._new_lines = 0 - - # the line number of the last written statement - self._last_line = 0 - - # true if nothing was written so far. 
- self._first_write = True - - # used by the `temporary_identifier` method to get new - # unique, temporary identifier - self._last_identifier = 0 - - # the current indentation - self._indentation = 0 - - # Tracks toplevel assignments - self._assign_stack: t.List[t.Set[str]] = [] - - # Tracks parameter definition blocks - self._param_def_block: t.List[t.Set[str]] = [] - - # Tracks the current context. - self._context_reference_stack = ["context"] - - @property - def optimized(self) -> bool: - return self.optimizer is not None - - # -- Various compilation helpers - - def fail(self, msg: str, lineno: int) -> "te.NoReturn": - """Fail with a :exc:`TemplateAssertionError`.""" - raise TemplateAssertionError(msg, lineno, self.name, self.filename) - - def temporary_identifier(self) -> str: - """Get a new unique identifier.""" - self._last_identifier += 1 - return f"t_{self._last_identifier}" - - def buffer(self, frame: Frame) -> None: - """Enable buffering for the frame from that point onwards.""" - frame.buffer = self.temporary_identifier() - self.writeline(f"{frame.buffer} = []") - - def return_buffer_contents( - self, frame: Frame, force_unescaped: bool = False - ) -> None: - """Return the buffer contents of the frame.""" - if not force_unescaped: - if frame.eval_ctx.volatile: - self.writeline("if context.eval_ctx.autoescape:") - self.indent() - self.writeline(f"return Markup(concat({frame.buffer}))") - self.outdent() - self.writeline("else:") - self.indent() - self.writeline(f"return concat({frame.buffer})") - self.outdent() - return - elif frame.eval_ctx.autoescape: - self.writeline(f"return Markup(concat({frame.buffer}))") - return - self.writeline(f"return concat({frame.buffer})") - - def indent(self) -> None: - """Indent by one.""" - self._indentation += 1 - - def outdent(self, step: int = 1) -> None: - """Outdent by step.""" - self._indentation -= step - - def start_write(self, frame: Frame, node: t.Optional[nodes.Node] = None) -> None: - """Yield or write into the frame buffer.""" - if frame.buffer is None: - self.writeline("yield ", node) - else: - self.writeline(f"{frame.buffer}.append(", node) - - def end_write(self, frame: Frame) -> None: - """End the writing process started by `start_write`.""" - if frame.buffer is not None: - self.write(")") - - def simple_write( - self, s: str, frame: Frame, node: t.Optional[nodes.Node] = None - ) -> None: - """Simple shortcut for start_write + write + end_write.""" - self.start_write(frame, node) - self.write(s) - self.end_write(frame) - - def blockvisit(self, nodes: t.Iterable[nodes.Node], frame: Frame) -> None: - """Visit a list of nodes as block in a frame. If the current frame - is no buffer a dummy ``if 0: yield None`` is written automatically. 
- """ - try: - self.writeline("pass") - for node in nodes: - self.visit(node, frame) - except CompilerExit: - pass - - def write(self, x: str) -> None: - """Write a string into the output stream.""" - if self._new_lines: - if not self._first_write: - self.stream.write("\n" * self._new_lines) - self.code_lineno += self._new_lines - if self._write_debug_info is not None: - self.debug_info.append((self._write_debug_info, self.code_lineno)) - self._write_debug_info = None - self._first_write = False - self.stream.write(" " * self._indentation) - self._new_lines = 0 - self.stream.write(x) - - def writeline( - self, x: str, node: t.Optional[nodes.Node] = None, extra: int = 0 - ) -> None: - """Combination of newline and write.""" - self.newline(node, extra) - self.write(x) - - def newline(self, node: t.Optional[nodes.Node] = None, extra: int = 0) -> None: - """Add one or more newlines before the next write.""" - self._new_lines = max(self._new_lines, 1 + extra) - if node is not None and node.lineno != self._last_line: - self._write_debug_info = node.lineno - self._last_line = node.lineno - - def signature( - self, - node: t.Union[nodes.Call, nodes.Filter, nodes.Test], - frame: Frame, - extra_kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - ) -> None: - """Writes a function call to the stream for the current node. - A leading comma is added automatically. The extra keyword - arguments may not include python keywords otherwise a syntax - error could occur. The extra keyword arguments should be given - as python dict. - """ - # if any of the given keyword arguments is a python keyword - # we have to make sure that no invalid call is created. - kwarg_workaround = any( - is_python_keyword(t.cast(str, k)) - for k in chain((x.key for x in node.kwargs), extra_kwargs or ()) - ) - - for arg in node.args: - self.write(", ") - self.visit(arg, frame) - - if not kwarg_workaround: - for kwarg in node.kwargs: - self.write(", ") - self.visit(kwarg, frame) - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f", {key}={value}") - if node.dyn_args: - self.write(", *") - self.visit(node.dyn_args, frame) - - if kwarg_workaround: - if node.dyn_kwargs is not None: - self.write(", **dict({") - else: - self.write(", **{") - for kwarg in node.kwargs: - self.write(f"{kwarg.key!r}: ") - self.visit(kwarg.value, frame) - self.write(", ") - if extra_kwargs is not None: - for key, value in extra_kwargs.items(): - self.write(f"{key!r}: {value}, ") - if node.dyn_kwargs is not None: - self.write("}, **") - self.visit(node.dyn_kwargs, frame) - self.write(")") - else: - self.write("}") - - elif node.dyn_kwargs is not None: - self.write(", **") - self.visit(node.dyn_kwargs, frame) - - def pull_dependencies(self, nodes: t.Iterable[nodes.Node]) -> None: - """Find all filter and test names used in the template and - assign them to variables in the compiled namespace. Checking - that the names are registered with the environment is done when - compiling the Filter and Test nodes. If the node is in an If or - CondExpr node, the check is done at runtime instead. - - .. versionchanged:: 3.0 - Filters and tests in If and CondExpr nodes are checked at - runtime instead of compile time. 
- """ - visitor = DependencyFinderVisitor() - - for node in nodes: - visitor.visit(node) - - for id_map, names, dependency in ( - (self.filters, visitor.filters, "filters"), - ( - self.tests, - visitor.tests, - "tests", - ), - ): - for name in sorted(names): - if name not in id_map: - id_map[name] = self.temporary_identifier() - - # add check during runtime that dependencies used inside of executed - # blocks are defined, as this step may be skipped during compile time - self.writeline("try:") - self.indent() - self.writeline(f"{id_map[name]} = environment.{dependency}[{name!r}]") - self.outdent() - self.writeline("except KeyError:") - self.indent() - self.writeline("@internalcode") - self.writeline(f"def {id_map[name]}(*unused):") - self.indent() - self.writeline( - f'raise TemplateRuntimeError("No {dependency[:-1]}' - f' named {name!r} found.")' - ) - self.outdent() - self.outdent() - - def enter_frame(self, frame: Frame) -> None: - undefs = [] - for target, (action, param) in frame.symbols.loads.items(): - if action == VAR_LOAD_PARAMETER: - pass - elif action == VAR_LOAD_RESOLVE: - self.writeline(f"{target} = {self.get_resolve_func()}({param!r})") - elif action == VAR_LOAD_ALIAS: - self.writeline(f"{target} = {param}") - elif action == VAR_LOAD_UNDEFINED: - undefs.append(target) - else: - raise NotImplementedError("unknown load instruction") - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def leave_frame(self, frame: Frame, with_python_scope: bool = False) -> None: - if not with_python_scope: - undefs = [] - for target in frame.symbols.loads: - undefs.append(target) - if undefs: - self.writeline(f"{' = '.join(undefs)} = missing") - - def choose_async(self, async_value: str = "async ", sync_value: str = "") -> str: - return async_value if self.environment.is_async else sync_value - - def func(self, name: str) -> str: - return f"{self.choose_async()}def {name}" - - def macro_body( - self, node: t.Union[nodes.Macro, nodes.CallBlock], frame: Frame - ) -> t.Tuple[Frame, MacroRef]: - """Dump the function def of a macro or call block.""" - frame = frame.inner() - frame.symbols.analyze_node(node) - macro_ref = MacroRef(node) - - explicit_caller = None - skip_special_params = set() - args = [] - - for idx, arg in enumerate(node.args): - if arg.name == "caller": - explicit_caller = idx - if arg.name in ("kwargs", "varargs"): - skip_special_params.add(arg.name) - args.append(frame.symbols.ref(arg.name)) - - undeclared = find_undeclared(node.body, ("caller", "kwargs", "varargs")) - - if "caller" in undeclared: - # In older Jinja versions there was a bug that allowed caller - # to retain the special behavior even if it was mentioned in - # the argument list. However thankfully this was only really - # working if it was the last argument. So we are explicitly - # checking this now and error out if it is anywhere else in - # the argument list. 
- if explicit_caller is not None: - try: - node.defaults[explicit_caller - len(node.args)] - except IndexError: - self.fail( - "When defining macros or call blocks the " - 'special "caller" argument must be omitted ' - "or be given a default.", - node.lineno, - ) - else: - args.append(frame.symbols.declare_parameter("caller")) - macro_ref.accesses_caller = True - if "kwargs" in undeclared and "kwargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("kwargs")) - macro_ref.accesses_kwargs = True - if "varargs" in undeclared and "varargs" not in skip_special_params: - args.append(frame.symbols.declare_parameter("varargs")) - macro_ref.accesses_varargs = True - - # macros are delayed, they never require output checks - frame.require_output_check = False - frame.symbols.analyze_node(node) - self.writeline(f"{self.func('macro')}({', '.join(args)}):", node) - self.indent() - - self.buffer(frame) - self.enter_frame(frame) - - self.push_parameter_definitions(frame) - for idx, arg in enumerate(node.args): - ref = frame.symbols.ref(arg.name) - self.writeline(f"if {ref} is missing:") - self.indent() - try: - default = node.defaults[idx - len(node.args)] - except IndexError: - self.writeline( - f'{ref} = undefined("parameter {arg.name!r} was not provided",' - f" name={arg.name!r})" - ) - else: - self.writeline(f"{ref} = ") - self.visit(default, frame) - self.mark_parameter_stored(ref) - self.outdent() - self.pop_parameter_definitions() - - self.blockvisit(node.body, frame) - self.return_buffer_contents(frame, force_unescaped=True) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - return frame, macro_ref - - def macro_def(self, macro_ref: MacroRef, frame: Frame) -> None: - """Dump the macro definition for the def created by macro_body.""" - arg_tuple = ", ".join(repr(x.name) for x in macro_ref.node.args) - name = getattr(macro_ref.node, "name", None) - if len(macro_ref.node.args) == 1: - arg_tuple += "," - self.write( - f"Macro(environment, macro, {name!r}, ({arg_tuple})," - f" {macro_ref.accesses_kwargs!r}, {macro_ref.accesses_varargs!r}," - f" {macro_ref.accesses_caller!r}, context.eval_ctx.autoescape)" - ) - - def position(self, node: nodes.Node) -> str: - """Return a human readable position for the node.""" - rv = f"line {node.lineno}" - if self.name is not None: - rv = f"{rv} in {self.name!r}" - return rv - - def dump_local_context(self, frame: Frame) -> str: - items_kv = ", ".join( - f"{name!r}: {target}" - for name, target in frame.symbols.dump_stores().items() - ) - return f"{{{items_kv}}}" - - def write_commons(self) -> None: - """Writes a common preamble that is used by root and block functions. - Primarily this sets up common local helpers and enforces a generator - through a dead branch. - """ - self.writeline("resolve = context.resolve_or_missing") - self.writeline("undefined = environment.undefined") - self.writeline("concat = environment.concat") - # always use the standard Undefined class for the implicit else of - # conditional expressions - self.writeline("cond_expr_undefined = Undefined") - self.writeline("if 0: yield None") - - def push_parameter_definitions(self, frame: Frame) -> None: - """Pushes all parameter targets from the given frame into a local - stack that permits tracking of yet to be assigned parameters. In - particular this enables the optimization from `visit_Name` to skip - undefined expressions for parameters in macros as macros can reference - otherwise unbound parameters. 
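# [editor's illustrative sketch, not part of the patch] The special ``caller``
# parameter handled in macro_body above is what {% call %} blocks supply:
from jinja2 import Environment

tmpl = Environment().from_string(
    "{% macro box() %}<b>{{ caller() }}</b>{% endmacro %}"
    "{% call box() %}hi{% endcall %}"
)
print(tmpl.render())  # <b>hi</b>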
- """ - self._param_def_block.append(frame.symbols.dump_param_targets()) - - def pop_parameter_definitions(self) -> None: - """Pops the current parameter definitions set.""" - self._param_def_block.pop() - - def mark_parameter_stored(self, target: str) -> None: - """Marks a parameter in the current parameter definitions as stored. - This will skip the enforced undefined checks. - """ - if self._param_def_block: - self._param_def_block[-1].discard(target) - - def push_context_reference(self, target: str) -> None: - self._context_reference_stack.append(target) - - def pop_context_reference(self) -> None: - self._context_reference_stack.pop() - - def get_context_ref(self) -> str: - return self._context_reference_stack[-1] - - def get_resolve_func(self) -> str: - target = self._context_reference_stack[-1] - if target == "context": - return "resolve" - return f"{target}.resolve" - - def derive_context(self, frame: Frame) -> str: - return f"{self.get_context_ref()}.derived({self.dump_local_context(frame)})" - - def parameter_is_undeclared(self, target: str) -> bool: - """Checks if a given target is an undeclared parameter.""" - if not self._param_def_block: - return False - return target in self._param_def_block[-1] - - def push_assign_tracking(self) -> None: - """Pushes a new layer for assignment tracking.""" - self._assign_stack.append(set()) - - def pop_assign_tracking(self, frame: Frame) -> None: - """Pops the topmost level for assignment tracking and updates the - context variables if necessary. - """ - vars = self._assign_stack.pop() - if ( - not frame.block_frame - and not frame.loop_frame - and not frame.toplevel - or not vars - ): - return - public_names = [x for x in vars if x[:1] != "_"] - if len(vars) == 1: - name = next(iter(vars)) - ref = frame.symbols.ref(name) - if frame.loop_frame: - self.writeline(f"_loop_vars[{name!r}] = {ref}") - return - if frame.block_frame: - self.writeline(f"_block_vars[{name!r}] = {ref}") - return - self.writeline(f"context.vars[{name!r}] = {ref}") - else: - if frame.loop_frame: - self.writeline("_loop_vars.update({") - elif frame.block_frame: - self.writeline("_block_vars.update({") - else: - self.writeline("context.vars.update({") - for idx, name in enumerate(vars): - if idx: - self.write(", ") - ref = frame.symbols.ref(name) - self.write(f"{name!r}: {ref}") - self.write("})") - if not frame.block_frame and not frame.loop_frame and public_names: - if len(public_names) == 1: - self.writeline(f"context.exported_vars.add({public_names[0]!r})") - else: - names_str = ", ".join(map(repr, public_names)) - self.writeline(f"context.exported_vars.update(({names_str}))") - - # -- Statement Visitors - - def visit_Template( - self, node: nodes.Template, frame: t.Optional[Frame] = None - ) -> None: - assert frame is None, "no root frame allowed" - eval_ctx = EvalContext(self.environment, self.name) - - from .runtime import async_exported - from .runtime import exported - - if self.environment.is_async: - exported_names = sorted(exported + async_exported) - else: - exported_names = sorted(exported) - - self.writeline("from jinja2.runtime import " + ", ".join(exported_names)) - - # if we want a deferred initialization we cannot move the - # environment into a local name - envenv = "" if self.defer_init else ", environment=environment" - - # do we have an extends tag at all? If not, we can save some - # overhead by just not processing any inheritance code. 
- have_extends = node.find(nodes.Extends) is not None - - # find all blocks - for block in node.find_all(nodes.Block): - if block.name in self.blocks: - self.fail(f"block {block.name!r} defined twice", block.lineno) - self.blocks[block.name] = block - - # find all imports and import them - for import_ in node.find_all(nodes.ImportedName): - if import_.importname not in self.import_aliases: - imp = import_.importname - self.import_aliases[imp] = alias = self.temporary_identifier() - if "." in imp: - module, obj = imp.rsplit(".", 1) - self.writeline(f"from {module} import {obj} as {alias}") - else: - self.writeline(f"import {imp} as {alias}") - - # add the load name - self.writeline(f"name = {self.name!r}") - - # generate the root render function. - self.writeline( - f"{self.func('root')}(context, missing=missing{envenv}):", extra=1 - ) - self.indent() - self.write_commons() - - # process the root - frame = Frame(eval_ctx) - if "self" in find_undeclared(node.body, ("self",)): - ref = frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - frame.symbols.analyze_node(node) - frame.toplevel = frame.rootlevel = True - frame.require_output_check = have_extends and not self.has_known_extends - if have_extends: - self.writeline("parent_template = None") - self.enter_frame(frame) - self.pull_dependencies(node.body) - self.blockvisit(node.body, frame) - self.leave_frame(frame, with_python_scope=True) - self.outdent() - - # make sure that the parent root is called. - if have_extends: - if not self.has_known_extends: - self.indent() - self.writeline("if parent_template is not None:") - self.indent() - if not self.environment.is_async: - self.writeline("yield from parent_template.root_render_func(context)") - else: - self.writeline( - "async for event in parent_template.root_render_func(context):" - ) - self.indent() - self.writeline("yield event") - self.outdent() - self.outdent(1 + (not self.has_known_extends)) - - # at this point we now have the blocks collected and can visit them too. - for name, block in self.blocks.items(): - self.writeline( - f"{self.func('block_' + name)}(context, missing=missing{envenv}):", - block, - 1, - ) - self.indent() - self.write_commons() - # It's important that we do not make this frame a child of the - # toplevel template. This would cause a variety of - # interesting issues with identifier tracking. 
- block_frame = Frame(eval_ctx) - block_frame.block_frame = True - undeclared = find_undeclared(block.body, ("self", "super")) - if "self" in undeclared: - ref = block_frame.symbols.declare_parameter("self") - self.writeline(f"{ref} = TemplateReference(context)") - if "super" in undeclared: - ref = block_frame.symbols.declare_parameter("super") - self.writeline(f"{ref} = context.super({name!r}, block_{name})") - block_frame.symbols.analyze_node(block) - block_frame.block = name - self.writeline("_block_vars = {}") - self.enter_frame(block_frame) - self.pull_dependencies(block.body) - self.blockvisit(block.body, block_frame) - self.leave_frame(block_frame, with_python_scope=True) - self.outdent() - - blocks_kv_str = ", ".join(f"{x!r}: block_{x}" for x in self.blocks) - self.writeline(f"blocks = {{{blocks_kv_str}}}", extra=1) - debug_kv_str = "&".join(f"{k}={v}" for k, v in self.debug_info) - self.writeline(f"debug_info = {debug_kv_str!r}") - - def visit_Block(self, node: nodes.Block, frame: Frame) -> None: - """Call a block and register it for the template.""" - level = 0 - if frame.toplevel: - # if we know that we are a child template, there is no need to - # check if we are one - if self.has_known_extends: - return - if self.extends_so_far > 0: - self.writeline("if parent_template is None:") - self.indent() - level += 1 - - if node.scoped: - context = self.derive_context(frame) - else: - context = self.get_context_ref() - - if node.required: - self.writeline(f"if len(context.blocks[{node.name!r}]) <= 1:", node) - self.indent() - self.writeline( - f'raise TemplateRuntimeError("Required block {node.name!r} not found")', - node, - ) - self.outdent() - - if not self.environment.is_async and frame.buffer is None: - self.writeline( - f"yield from context.blocks[{node.name!r}][0]({context})", node - ) - else: - self.writeline( - f"{self.choose_async()}for event in" - f" context.blocks[{node.name!r}][0]({context}):", - node, - ) - self.indent() - self.simple_write("event", frame) - self.outdent() - - self.outdent(level) - - def visit_Extends(self, node: nodes.Extends, frame: Frame) -> None: - """Calls the extender.""" - if not frame.toplevel: - self.fail("cannot use extend from a non top-level scope", node.lineno) - - # if the number of extends statements in general is zero so - # far, we don't have to add a check if something extended - # the template before this one. - if self.extends_so_far > 0: - # if we have a known extends we just add a template runtime - # error into the generated code. We could catch that at compile - # time too, but i welcome it not to confuse users by throwing the - # same error at different times just "because we can". - if not self.has_known_extends: - self.writeline("if parent_template is not None:") - self.indent() - self.writeline('raise TemplateRuntimeError("extended multiple times")') - - # if we have a known extends already we don't need that code here - # as we know that the template execution will end here. 
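# [editor's illustrative sketch, not part of the patch] visit_Block above
# emits the runtime guard behind ``{% block ... required %}``; a child
# template must override such a block before rendering can succeed:
from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({
    "base.txt": "{% block body required %}{% endblock %}",
    "child.txt": "{% extends 'base.txt' %}{% block body %}ok{% endblock %}",
}))
print(env.get_template("child.txt").render())  # ok
# env.get_template("base.txt").render() raises TemplateRuntimeError instead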
- if self.has_known_extends: - raise CompilerExit() - else: - self.outdent() - - self.writeline("parent_template = environment.get_template(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - self.writeline("for name, parent_block in parent_template.blocks.items():") - self.indent() - self.writeline("context.blocks.setdefault(name, []).append(parent_block)") - self.outdent() - - # if this extends statement was in the root level we can take - # advantage of that information and simplify the generated code - # in the top level from this point onwards - if frame.rootlevel: - self.has_known_extends = True - - # and now we have one more - self.extends_so_far += 1 - - def visit_Include(self, node: nodes.Include, frame: Frame) -> None: - """Handles includes.""" - if node.ignore_missing: - self.writeline("try:") - self.indent() - - func_name = "get_or_select_template" - if isinstance(node.template, nodes.Const): - if isinstance(node.template.value, str): - func_name = "get_template" - elif isinstance(node.template.value, (tuple, list)): - func_name = "select_template" - elif isinstance(node.template, (nodes.Tuple, nodes.List)): - func_name = "select_template" - - self.writeline(f"template = environment.{func_name}(", node) - self.visit(node.template, frame) - self.write(f", {self.name!r})") - if node.ignore_missing: - self.outdent() - self.writeline("except TemplateNotFound:") - self.indent() - self.writeline("pass") - self.outdent() - self.writeline("else:") - self.indent() - - skip_event_yield = False - if node.with_context: - self.writeline( - f"{self.choose_async()}for event in template.root_render_func(" - "template.new_context(context.get_all(), True," - f" {self.dump_local_context(frame)})):" - ) - elif self.environment.is_async: - self.writeline( - "for event in (await template._get_default_module_async())" - "._body_stream:" - ) - else: - self.writeline("yield from template._get_default_module()._body_stream") - skip_event_yield = True - - if not skip_event_yield: - self.indent() - self.simple_write("event", frame) - self.outdent() - - if node.ignore_missing: - self.outdent() - - def _import_common( - self, node: t.Union[nodes.Import, nodes.FromImport], frame: Frame - ) -> None: - self.write(f"{self.choose_async('await ')}environment.get_template(") - self.visit(node.template, frame) - self.write(f", {self.name!r}).") - - if node.with_context: - f_name = f"make_module{self.choose_async('_async')}" - self.write( - f"{f_name}(context.get_all(), True, {self.dump_local_context(frame)})" - ) - else: - self.write(f"_get_default_module{self.choose_async('_async')}(context)") - - def visit_Import(self, node: nodes.Import, frame: Frame) -> None: - """Visit regular imports.""" - self.writeline(f"{frame.symbols.ref(node.target)} = ", node) - if frame.toplevel: - self.write(f"context.vars[{node.target!r}] = ") - - self._import_common(node, frame) - - if frame.toplevel and not node.target.startswith("_"): - self.writeline(f"context.exported_vars.discard({node.target!r})") - - def visit_FromImport(self, node: nodes.FromImport, frame: Frame) -> None: - """Visit named imports.""" - self.newline(node) - self.write("included_template = ") - self._import_common(node, frame) - var_names = [] - discarded_names = [] - for name in node.names: - if isinstance(name, tuple): - name, alias = name - else: - alias = name - self.writeline( - f"{frame.symbols.ref(alias)} =" - f" getattr(included_template, {name!r}, missing)" - ) - self.writeline(f"if {frame.symbols.ref(alias)} is missing:") 
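# [editor's illustrative sketch, not part of the patch] visit_Include above
# wraps ``ignore missing`` includes in try/except TemplateNotFound, so a
# missing target simply contributes no output:
from jinja2 import DictLoader, Environment

env = Environment(loader=DictLoader({}))
print(env.from_string("{% include 'gone.txt' ignore missing %}").render())  # ''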
- self.indent() - message = ( - "the template {included_template.__name__!r}" - f" (imported on {self.position(node)})" - f" does not export the requested name {name!r}" - ) - self.writeline( - f"{frame.symbols.ref(alias)} = undefined(f{message!r}, name={name!r})" - ) - self.outdent() - if frame.toplevel: - var_names.append(alias) - if not alias.startswith("_"): - discarded_names.append(alias) - - if var_names: - if len(var_names) == 1: - name = var_names[0] - self.writeline(f"context.vars[{name!r}] = {frame.symbols.ref(name)}") - else: - names_kv = ", ".join( - f"{name!r}: {frame.symbols.ref(name)}" for name in var_names - ) - self.writeline(f"context.vars.update({{{names_kv}}})") - if discarded_names: - if len(discarded_names) == 1: - self.writeline(f"context.exported_vars.discard({discarded_names[0]!r})") - else: - names_str = ", ".join(map(repr, discarded_names)) - self.writeline( - f"context.exported_vars.difference_update(({names_str}))" - ) - - def visit_For(self, node: nodes.For, frame: Frame) -> None: - loop_frame = frame.inner() - loop_frame.loop_frame = True - test_frame = frame.inner() - else_frame = frame.inner() - - # try to figure out if we have an extended loop. An extended loop - # is necessary if the loop is in recursive mode if the special loop - # variable is accessed in the body if the body is a scoped block. - extended_loop = ( - node.recursive - or "loop" - in find_undeclared(node.iter_child_nodes(only=("body",)), ("loop",)) - or any(block.scoped for block in node.find_all(nodes.Block)) - ) - - loop_ref = None - if extended_loop: - loop_ref = loop_frame.symbols.declare_parameter("loop") - - loop_frame.symbols.analyze_node(node, for_branch="body") - if node.else_: - else_frame.symbols.analyze_node(node, for_branch="else") - - if node.test: - loop_filter_func = self.temporary_identifier() - test_frame.symbols.analyze_node(node, for_branch="test") - self.writeline(f"{self.func(loop_filter_func)}(fiter):", node.test) - self.indent() - self.enter_frame(test_frame) - self.writeline(self.choose_async("async for ", "for ")) - self.visit(node.target, loop_frame) - self.write(" in ") - self.write(self.choose_async("auto_aiter(fiter)", "fiter")) - self.write(":") - self.indent() - self.writeline("if ", node.test) - self.visit(node.test, test_frame) - self.write(":") - self.indent() - self.writeline("yield ") - self.visit(node.target, loop_frame) - self.outdent(3) - self.leave_frame(test_frame, with_python_scope=True) - - # if we don't have an recursive loop we have to find the shadowed - # variables at that point. Because loops can be nested but the loop - # variable is a special one we have to enforce aliasing for it. 
- if node.recursive: - self.writeline( - f"{self.func('loop')}(reciter, loop_render_func, depth=0):", node - ) - self.indent() - self.buffer(loop_frame) - - # Use the same buffer for the else frame - else_frame.buffer = loop_frame.buffer - - # make sure the loop variable is a special one and raise a template - # assertion error if a loop tries to write to loop - if extended_loop: - self.writeline(f"{loop_ref} = missing") - - for name in node.find_all(nodes.Name): - if name.ctx == "store" and name.name == "loop": - self.fail( - "Can't assign to special loop variable in for-loop target", - name.lineno, - ) - - if node.else_: - iteration_indicator = self.temporary_identifier() - self.writeline(f"{iteration_indicator} = 1") - - self.writeline(self.choose_async("async for ", "for "), node) - self.visit(node.target, loop_frame) - if extended_loop: - self.write(f", {loop_ref} in {self.choose_async('Async')}LoopContext(") - else: - self.write(" in ") - - if node.test: - self.write(f"{loop_filter_func}(") - if node.recursive: - self.write("reciter") - else: - if self.environment.is_async and not extended_loop: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async and not extended_loop: - self.write(")") - if node.test: - self.write(")") - - if node.recursive: - self.write(", undefined, loop_render_func, depth):") - else: - self.write(", undefined):" if extended_loop else ":") - - self.indent() - self.enter_frame(loop_frame) - - self.writeline("_loop_vars = {}") - self.blockvisit(node.body, loop_frame) - if node.else_: - self.writeline(f"{iteration_indicator} = 0") - self.outdent() - self.leave_frame( - loop_frame, with_python_scope=node.recursive and not node.else_ - ) - - if node.else_: - self.writeline(f"if {iteration_indicator}:") - self.indent() - self.enter_frame(else_frame) - self.blockvisit(node.else_, else_frame) - self.leave_frame(else_frame) - self.outdent() - - # if the node was recursive we have to return the buffer contents - # and start the iteration code - if node.recursive: - self.return_buffer_contents(loop_frame) - self.outdent() - self.start_write(frame, node) - self.write(f"{self.choose_async('await ')}loop(") - if self.environment.is_async: - self.write("auto_aiter(") - self.visit(node.iter, frame) - if self.environment.is_async: - self.write(")") - self.write(", loop)") - self.end_write(frame) - - # at the end of the iteration, clear any assignments made in the - # loop from the top level - if self._assign_stack: - self._assign_stack[-1].difference_update(loop_frame.symbols.stores) - - def visit_If(self, node: nodes.If, frame: Frame) -> None: - if_frame = frame.soft() - self.writeline("if ", node) - self.visit(node.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(node.body, if_frame) - self.outdent() - for elif_ in node.elif_: - self.writeline("elif ", elif_) - self.visit(elif_.test, if_frame) - self.write(":") - self.indent() - self.blockvisit(elif_.body, if_frame) - self.outdent() - if node.else_: - self.writeline("else:") - self.indent() - self.blockvisit(node.else_, if_frame) - self.outdent() - - def visit_Macro(self, node: nodes.Macro, frame: Frame) -> None: - macro_frame, macro_ref = self.macro_body(node, frame) - self.newline() - if frame.toplevel: - if not node.name.startswith("_"): - self.write(f"context.exported_vars.add({node.name!r})") - self.writeline(f"context.vars[{node.name!r}] = ") - self.write(f"{frame.symbols.ref(node.name)} = ") - self.macro_def(macro_ref, macro_frame) - - def visit_CallBlock(self, 
node: nodes.CallBlock, frame: Frame) -> None: - call_frame, macro_ref = self.macro_body(node, frame) - self.writeline("caller = ") - self.macro_def(macro_ref, call_frame) - self.start_write(frame, node) - self.visit_Call(node.call, frame, forward_caller=True) - self.end_write(frame) - - def visit_FilterBlock(self, node: nodes.FilterBlock, frame: Frame) -> None: - filter_frame = frame.inner() - filter_frame.symbols.analyze_node(node) - self.enter_frame(filter_frame) - self.buffer(filter_frame) - self.blockvisit(node.body, filter_frame) - self.start_write(frame, node) - self.visit_Filter(node.filter, filter_frame) - self.end_write(frame) - self.leave_frame(filter_frame) - - def visit_With(self, node: nodes.With, frame: Frame) -> None: - with_frame = frame.inner() - with_frame.symbols.analyze_node(node) - self.enter_frame(with_frame) - for target, expr in zip(node.targets, node.values): - self.newline() - self.visit(target, with_frame) - self.write(" = ") - self.visit(expr, frame) - self.blockvisit(node.body, with_frame) - self.leave_frame(with_frame) - - def visit_ExprStmt(self, node: nodes.ExprStmt, frame: Frame) -> None: - self.newline(node) - self.visit(node.node, frame) - - class _FinalizeInfo(t.NamedTuple): - const: t.Optional[t.Callable[..., str]] - src: t.Optional[str] - - @staticmethod - def _default_finalize(value: t.Any) -> t.Any: - """The default finalize function if the environment isn't - configured with one. Or, if the environment has one, this is - called on that function's output for constants. - """ - return str(value) - - _finalize: t.Optional[_FinalizeInfo] = None - - def _make_finalize(self) -> _FinalizeInfo: - """Build the finalize function to be used on constants and at - runtime. Cached so it's only created once for all output nodes. - - Returns a ``namedtuple`` with the following attributes: - - ``const`` - A function to finalize constant data at compile time. - - ``src`` - Source code to output around nodes to be evaluated at - runtime. - """ - if self._finalize is not None: - return self._finalize - - finalize: t.Optional[t.Callable[..., t.Any]] - finalize = default = self._default_finalize - src = None - - if self.environment.finalize: - src = "environment.finalize(" - env_finalize = self.environment.finalize - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(env_finalize) # type: ignore - ) - finalize = None - - if pass_arg is None: - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(value)) - - else: - src = f"{src}{pass_arg}, " - - if pass_arg == "environment": - - def finalize(value: t.Any) -> t.Any: # noqa: F811 - return default(env_finalize(self.environment, value)) - - self._finalize = self._FinalizeInfo(finalize, src) - return self._finalize - - def _output_const_repr(self, group: t.Iterable[t.Any]) -> str: - """Given a group of constant values converted from ``Output`` - child nodes, produce a string to write to the template module - source. - """ - return repr(concat(group)) - - def _output_child_to_const( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> str: - """Try to optimize a child of an ``Output`` node by trying to - convert it to constant, finalized data at compile time. - - If :exc:`Impossible` is raised, the node is not constant and - will be evaluated at runtime. Any other exception will also be - evaluated at runtime for easier debugging. 
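# [editor's illustrative sketch, not part of the patch] _make_finalize above
# threads the Environment's ``finalize`` hook through every output node, e.g.
# to print empty strings instead of None:
from jinja2 import Environment

env = Environment(finalize=lambda v: "" if v is None else v)
print(env.from_string("[{{ none }}]").render())  # [] rather than [None]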
- """ - const = node.as_const(frame.eval_ctx) - - if frame.eval_ctx.autoescape: - const = escape(const) - - # Template data doesn't go through finalize. - if isinstance(node, nodes.TemplateData): - return str(const) - - return finalize.const(const) # type: ignore - - def _output_child_pre( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code before visiting a child of an - ``Output`` node. - """ - if frame.eval_ctx.volatile: - self.write("(escape if context.eval_ctx.autoescape else str)(") - elif frame.eval_ctx.autoescape: - self.write("escape(") - else: - self.write("str(") - - if finalize.src is not None: - self.write(finalize.src) - - def _output_child_post( - self, node: nodes.Expr, frame: Frame, finalize: _FinalizeInfo - ) -> None: - """Output extra source code after visiting a child of an - ``Output`` node. - """ - self.write(")") - - if finalize.src is not None: - self.write(")") - - def visit_Output(self, node: nodes.Output, frame: Frame) -> None: - # If an extends is active, don't render outside a block. - if frame.require_output_check: - # A top-level extends is known to exist at compile time. - if self.has_known_extends: - return - - self.writeline("if parent_template is None:") - self.indent() - - finalize = self._make_finalize() - body: t.List[t.Union[t.List[t.Any], nodes.Expr]] = [] - - # Evaluate constants at compile time if possible. Each item in - # body will be either a list of static data or a node to be - # evaluated at runtime. - for child in node.nodes: - try: - if not ( - # If the finalize function requires runtime context, - # constants can't be evaluated at compile time. - finalize.const - # Unless it's basic template data that won't be - # finalized anyway. - or isinstance(child, nodes.TemplateData) - ): - raise nodes.Impossible() - - const = self._output_child_to_const(child, frame, finalize) - except (nodes.Impossible, Exception): - # The node was not constant and needs to be evaluated at - # runtime. Or another error was raised, which is easier - # to debug at runtime. - body.append(child) - continue - - if body and isinstance(body[-1], list): - body[-1].append(const) - else: - body.append([const]) - - if frame.buffer is not None: - if len(body) == 1: - self.writeline(f"{frame.buffer}.append(") - else: - self.writeline(f"{frame.buffer}.extend((") - - self.indent() - - for item in body: - if isinstance(item, list): - # A group of constant data to join and output. - val = self._output_const_repr(item) - - if frame.buffer is None: - self.writeline("yield " + val) - else: - self.writeline(val + ",") - else: - if frame.buffer is None: - self.writeline("yield ", item) - else: - self.newline(item) - - # A node to be evaluated at runtime. - self._output_child_pre(item, frame, finalize) - self.visit(item, frame) - self._output_child_post(item, frame, finalize) - - if frame.buffer is not None: - self.write(",") - - if frame.buffer is not None: - self.outdent() - self.writeline(")" if len(body) == 1 else "))") - - if frame.require_output_check: - self.outdent() - - def visit_Assign(self, node: nodes.Assign, frame: Frame) -> None: - self.push_assign_tracking() - self.newline(node) - self.visit(node.target, frame) - self.write(" = ") - self.visit(node.node, frame) - self.pop_assign_tracking(frame) - - def visit_AssignBlock(self, node: nodes.AssignBlock, frame: Frame) -> None: - self.push_assign_tracking() - block_frame = frame.inner() - # This is a special case. 
Since a set block always captures we - # will disable output checks. This way one can use set blocks - # toplevel even in extended templates. - block_frame.require_output_check = False - block_frame.symbols.analyze_node(node) - self.enter_frame(block_frame) - self.buffer(block_frame) - self.blockvisit(node.body, block_frame) - self.newline(node) - self.visit(node.target, frame) - self.write(" = (Markup if context.eval_ctx.autoescape else identity)(") - if node.filter is not None: - self.visit_Filter(node.filter, block_frame) - else: - self.write(f"concat({block_frame.buffer})") - self.write(")") - self.pop_assign_tracking(frame) - self.leave_frame(block_frame) - - # -- Expression Visitors - - def visit_Name(self, node: nodes.Name, frame: Frame) -> None: - if node.ctx == "store" and ( - frame.toplevel or frame.loop_frame or frame.block_frame - ): - if self._assign_stack: - self._assign_stack[-1].add(node.name) - ref = frame.symbols.ref(node.name) - - # If we are looking up a variable we might have to deal with the - # case where it's undefined. We can skip that case if the load - # instruction indicates a parameter which are always defined. - if node.ctx == "load": - load = frame.symbols.find_load(ref) - if not ( - load is not None - and load[0] == VAR_LOAD_PARAMETER - and not self.parameter_is_undeclared(ref) - ): - self.write( - f"(undefined(name={node.name!r}) if {ref} is missing else {ref})" - ) - return - - self.write(ref) - - def visit_NSRef(self, node: nodes.NSRef, frame: Frame) -> None: - # NSRefs can only be used to store values; since they use the normal - # `foo.bar` notation they will be parsed as a normal attribute access - # when used anywhere but in a `set` context - ref = frame.symbols.ref(node.name) - self.writeline(f"if not isinstance({ref}, Namespace):") - self.indent() - self.writeline( - "raise TemplateRuntimeError" - '("cannot assign attribute on non-namespace object")' - ) - self.outdent() - self.writeline(f"{ref}[{node.attr!r}]") - - def visit_Const(self, node: nodes.Const, frame: Frame) -> None: - val = node.as_const(frame.eval_ctx) - if isinstance(val, float): - self.write(str(val)) - else: - self.write(repr(val)) - - def visit_TemplateData(self, node: nodes.TemplateData, frame: Frame) -> None: - try: - self.write(repr(node.as_const(frame.eval_ctx))) - except nodes.Impossible: - self.write( - f"(Markup if context.eval_ctx.autoescape else identity)({node.data!r})" - ) - - def visit_Tuple(self, node: nodes.Tuple, frame: Frame) -> None: - self.write("(") - idx = -1 - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write(",)" if idx == 0 else ")") - - def visit_List(self, node: nodes.List, frame: Frame) -> None: - self.write("[") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item, frame) - self.write("]") - - def visit_Dict(self, node: nodes.Dict, frame: Frame) -> None: - self.write("{") - for idx, item in enumerate(node.items): - if idx: - self.write(", ") - self.visit(item.key, frame) - self.write(": ") - self.visit(item.value, frame) - self.write("}") - - visit_Add = _make_binop("+") - visit_Sub = _make_binop("-") - visit_Mul = _make_binop("*") - visit_Div = _make_binop("/") - visit_FloorDiv = _make_binop("//") - visit_Pow = _make_binop("**") - visit_Mod = _make_binop("%") - visit_And = _make_binop("and") - visit_Or = _make_binop("or") - visit_Pos = _make_unop("+") - visit_Neg = _make_unop("-") - visit_Not = _make_unop("not ") - - @optimizeconst - def visit_Concat(self, node: 
nodes.Concat, frame: Frame) -> None: - if frame.eval_ctx.volatile: - func_name = "(markup_join if context.eval_ctx.volatile else str_join)" - elif frame.eval_ctx.autoescape: - func_name = "markup_join" - else: - func_name = "str_join" - self.write(f"{func_name}((") - for arg in node.nodes: - self.visit(arg, frame) - self.write(", ") - self.write("))") - - @optimizeconst - def visit_Compare(self, node: nodes.Compare, frame: Frame) -> None: - self.write("(") - self.visit(node.expr, frame) - for op in node.ops: - self.visit(op, frame) - self.write(")") - - def visit_Operand(self, node: nodes.Operand, frame: Frame) -> None: - self.write(f" {operators[node.op]} ") - self.visit(node.expr, frame) - - @optimizeconst - def visit_Getattr(self, node: nodes.Getattr, frame: Frame) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getattr(") - self.visit(node.node, frame) - self.write(f", {node.attr!r})") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Getitem(self, node: nodes.Getitem, frame: Frame) -> None: - # slices bypass the environment getitem method. - if isinstance(node.arg, nodes.Slice): - self.visit(node.node, frame) - self.write("[") - self.visit(node.arg, frame) - self.write("]") - else: - if self.environment.is_async: - self.write("(await auto_await(") - - self.write("environment.getitem(") - self.visit(node.node, frame) - self.write(", ") - self.visit(node.arg, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - def visit_Slice(self, node: nodes.Slice, frame: Frame) -> None: - if node.start is not None: - self.visit(node.start, frame) - self.write(":") - if node.stop is not None: - self.visit(node.stop, frame) - if node.step is not None: - self.write(":") - self.visit(node.step, frame) - - @contextmanager - def _filter_test_common( - self, node: t.Union[nodes.Filter, nodes.Test], frame: Frame, is_filter: bool - ) -> t.Iterator[None]: - if self.environment.is_async: - self.write("(await auto_await(") - - if is_filter: - self.write(f"{self.filters[node.name]}(") - func = self.environment.filters.get(node.name) - else: - self.write(f"{self.tests[node.name]}(") - func = self.environment.tests.get(node.name) - - # When inside an If or CondExpr frame, allow the filter to be - # undefined at compile time and only raise an error if it's - # actually called at runtime. See pull_dependencies. - if func is None and not frame.soft_frame: - type_name = "filter" if is_filter else "test" - self.fail(f"No {type_name} named {node.name!r}.", node.lineno) - - pass_arg = { - _PassArg.context: "context", - _PassArg.eval_context: "context.eval_ctx", - _PassArg.environment: "environment", - }.get( - _PassArg.from_obj(func) # type: ignore - ) - - if pass_arg is not None: - self.write(f"{pass_arg}, ") - - # Back to the visitor function to handle visiting the target of - # the filter or test. 
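The pass_arg lookup in _filter_test_common above is driven by the pass_context/pass_eval_context/pass_environment decorators on the filter or test itself. A small sketch with an illustrative filter name (``delimiters`` is not part of Jinja):

    from jinja2 import Environment, pass_environment

    # pass_environment makes the compiler prepend "environment" to the
    # call, matching the _PassArg.environment branch above.
    @pass_environment
    def delimiters(env, value):
        return f"{env.variable_start_string}{value}{env.variable_end_string}"

    env = Environment()
    env.filters["delimiters"] = delimiters
    print(env.from_string("{{ 'x' | delimiters }}").render())  # {{x}}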
- yield - - self.signature(node, frame) - self.write(")") - - if self.environment.is_async: - self.write("))") - - @optimizeconst - def visit_Filter(self, node: nodes.Filter, frame: Frame) -> None: - with self._filter_test_common(node, frame, True): - # if the filter node is None we are inside a filter block - # and want to write to the current buffer - if node.node is not None: - self.visit(node.node, frame) - elif frame.eval_ctx.volatile: - self.write( - f"(Markup(concat({frame.buffer}))" - f" if context.eval_ctx.autoescape else concat({frame.buffer}))" - ) - elif frame.eval_ctx.autoescape: - self.write(f"Markup(concat({frame.buffer}))") - else: - self.write(f"concat({frame.buffer})") - - @optimizeconst - def visit_Test(self, node: nodes.Test, frame: Frame) -> None: - with self._filter_test_common(node, frame, False): - self.visit(node.node, frame) - - @optimizeconst - def visit_CondExpr(self, node: nodes.CondExpr, frame: Frame) -> None: - frame = frame.soft() - - def write_expr2() -> None: - if node.expr2 is not None: - self.visit(node.expr2, frame) - return - - self.write( - f'cond_expr_undefined("the inline if-expression on' - f" {self.position(node)} evaluated to false and no else" - f' section was defined.")' - ) - - self.write("(") - self.visit(node.expr1, frame) - self.write(" if ") - self.visit(node.test, frame) - self.write(" else ") - write_expr2() - self.write(")") - - @optimizeconst - def visit_Call( - self, node: nodes.Call, frame: Frame, forward_caller: bool = False - ) -> None: - if self.environment.is_async: - self.write("(await auto_await(") - if self.environment.sandboxed: - self.write("environment.call(context, ") - else: - self.write("context.call(") - self.visit(node.node, frame) - extra_kwargs = {"caller": "caller"} if forward_caller else None - loop_kwargs = {"_loop_vars": "_loop_vars"} if frame.loop_frame else {} - block_kwargs = {"_block_vars": "_block_vars"} if frame.block_frame else {} - if extra_kwargs: - extra_kwargs.update(loop_kwargs, **block_kwargs) - elif loop_kwargs or block_kwargs: - extra_kwargs = dict(loop_kwargs, **block_kwargs) - self.signature(node, frame, extra_kwargs) - self.write(")") - if self.environment.is_async: - self.write("))") - - def visit_Keyword(self, node: nodes.Keyword, frame: Frame) -> None: - self.write(node.key + "=") - self.visit(node.value, frame) - - # -- Unused nodes for extensions - - def visit_MarkSafe(self, node: nodes.MarkSafe, frame: Frame) -> None: - self.write("Markup(") - self.visit(node.expr, frame) - self.write(")") - - def visit_MarkSafeIfAutoescape( - self, node: nodes.MarkSafeIfAutoescape, frame: Frame - ) -> None: - self.write("(Markup if context.eval_ctx.autoescape else identity)(") - self.visit(node.expr, frame) - self.write(")") - - def visit_EnvironmentAttribute( - self, node: nodes.EnvironmentAttribute, frame: Frame - ) -> None: - self.write("environment." 
+ node.name) - - def visit_ExtensionAttribute( - self, node: nodes.ExtensionAttribute, frame: Frame - ) -> None: - self.write(f"environment.extensions[{node.identifier!r}].{node.name}") - - def visit_ImportedName(self, node: nodes.ImportedName, frame: Frame) -> None: - self.write(self.import_aliases[node.importname]) - - def visit_InternalName(self, node: nodes.InternalName, frame: Frame) -> None: - self.write(node.name) - - def visit_ContextReference( - self, node: nodes.ContextReference, frame: Frame - ) -> None: - self.write("context") - - def visit_DerivedContextReference( - self, node: nodes.DerivedContextReference, frame: Frame - ) -> None: - self.write(self.derive_context(frame)) - - def visit_Continue(self, node: nodes.Continue, frame: Frame) -> None: - self.writeline("continue", node) - - def visit_Break(self, node: nodes.Break, frame: Frame) -> None: - self.writeline("break", node) - - def visit_Scope(self, node: nodes.Scope, frame: Frame) -> None: - scope_frame = frame.inner() - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - - def visit_OverlayScope(self, node: nodes.OverlayScope, frame: Frame) -> None: - ctx = self.temporary_identifier() - self.writeline(f"{ctx} = {self.derive_context(frame)}") - self.writeline(f"{ctx}.vars = ") - self.visit(node.context, frame) - self.push_context_reference(ctx) - - scope_frame = frame.inner(isolated=True) - scope_frame.symbols.analyze_node(node) - self.enter_frame(scope_frame) - self.blockvisit(node.body, scope_frame) - self.leave_frame(scope_frame) - self.pop_context_reference() - - def visit_EvalContextModifier( - self, node: nodes.EvalContextModifier, frame: Frame - ) -> None: - for keyword in node.options: - self.writeline(f"context.eval_ctx.{keyword.key} = ") - self.visit(keyword.value, frame) - try: - val = keyword.value.as_const(frame.eval_ctx) - except nodes.Impossible: - frame.eval_ctx.volatile = True - else: - setattr(frame.eval_ctx, keyword.key, val) - - def visit_ScopedEvalContextModifier( - self, node: nodes.ScopedEvalContextModifier, frame: Frame - ) -> None: - old_ctx_name = self.temporary_identifier() - saved_ctx = frame.eval_ctx.save() - self.writeline(f"{old_ctx_name} = context.eval_ctx.save()") - self.visit_EvalContextModifier(node, frame) - for child in node.body: - self.visit(child, frame) - frame.eval_ctx.revert(saved_ctx) - self.writeline(f"context.eval_ctx.revert({old_ctx_name})") diff --git a/myenv/Lib/site-packages/jinja2/constants.py b/myenv/Lib/site-packages/jinja2/constants.py deleted file mode 100644 index 41a1c23..0000000 --- a/myenv/Lib/site-packages/jinja2/constants.py +++ /dev/null @@ -1,20 +0,0 @@ -#: list of lorem ipsum words used by the lipsum() helper function -LOREM_IPSUM_WORDS = """\ -a ac accumsan ad adipiscing aenean aliquam aliquet amet ante aptent arcu at -auctor augue bibendum blandit class commodo condimentum congue consectetuer -consequat conubia convallis cras cubilia cum curabitur curae cursus dapibus -diam dictum dictumst dignissim dis dolor donec dui duis egestas eget eleifend -elementum elit enim erat eros est et etiam eu euismod facilisi facilisis fames -faucibus felis fermentum feugiat fringilla fusce gravida habitant habitasse hac -hendrerit hymenaeos iaculis id imperdiet in inceptos integer interdum ipsum -justo lacinia lacus laoreet lectus leo libero ligula litora lobortis lorem -luctus maecenas magna magnis malesuada massa mattis mauris metus mi molestie -mollis montes morbi mus nam 
nascetur natoque nec neque netus nibh nisi nisl non -nonummy nostra nulla nullam nunc odio orci ornare parturient pede pellentesque -penatibus per pharetra phasellus placerat platea porta porttitor posuere -potenti praesent pretium primis proin pulvinar purus quam quis quisque rhoncus -ridiculus risus rutrum sagittis sapien scelerisque sed sem semper senectus sit -sociis sociosqu sodales sollicitudin suscipit suspendisse taciti tellus tempor -tempus tincidunt torquent tortor tristique turpis ullamcorper ultrices -ultricies urna ut varius vehicula vel velit venenatis vestibulum vitae vivamus -viverra volutpat vulputate""" diff --git a/myenv/Lib/site-packages/jinja2/debug.py b/myenv/Lib/site-packages/jinja2/debug.py deleted file mode 100644 index 7ed7e92..0000000 --- a/myenv/Lib/site-packages/jinja2/debug.py +++ /dev/null @@ -1,191 +0,0 @@ -import sys -import typing as t -from types import CodeType -from types import TracebackType - -from .exceptions import TemplateSyntaxError -from .utils import internal_code -from .utils import missing - -if t.TYPE_CHECKING: - from .runtime import Context - - -def rewrite_traceback_stack(source: t.Optional[str] = None) -> BaseException: - """Rewrite the current exception to replace any tracebacks from - within compiled template code with tracebacks that look like they - came from the template source. - - This must be called within an ``except`` block. - - :param source: For ``TemplateSyntaxError``, the original source if - known. - :return: The original exception with the rewritten traceback. - """ - _, exc_value, tb = sys.exc_info() - exc_value = t.cast(BaseException, exc_value) - tb = t.cast(TracebackType, tb) - - if isinstance(exc_value, TemplateSyntaxError) and not exc_value.translated: - exc_value.translated = True - exc_value.source = source - # Remove the old traceback, otherwise the frames from the - # compiler still show up. - exc_value.with_traceback(None) - # Outside of runtime, so the frame isn't executing template - # code, but it still needs to point at the template. - tb = fake_traceback( - exc_value, None, exc_value.filename or "", exc_value.lineno - ) - else: - # Skip the frame for the render function. - tb = tb.tb_next - - stack = [] - - # Build the stack of traceback object, replacing any in template - # code with the source file and line information. - while tb is not None: - # Skip frames decorated with @internalcode. These are internal - # calls that aren't useful in template debugging output. - if tb.tb_frame.f_code in internal_code: - tb = tb.tb_next - continue - - template = tb.tb_frame.f_globals.get("__jinja_template__") - - if template is not None: - lineno = template.get_corresponding_lineno(tb.tb_lineno) - fake_tb = fake_traceback(exc_value, tb, template.filename, lineno) - stack.append(fake_tb) - else: - stack.append(tb) - - tb = tb.tb_next - - tb_next = None - - # Assign tb_next in reverse to avoid circular references. - for tb in reversed(stack): - tb.tb_next = tb_next - tb_next = tb - - return exc_value.with_traceback(tb_next) - - -def fake_traceback( # type: ignore - exc_value: BaseException, tb: t.Optional[TracebackType], filename: str, lineno: int -) -> TracebackType: - """Produce a new traceback object that looks like it came from the - template source instead of the compiled code. The filename, line - number, and location name will point to the template, and the local - variables will be the current template context. - - :param exc_value: The original exception to be re-raised to create - the new traceback. 
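The net effect of rewrite_traceback_stack and fake_traceback is that errors raised inside compiled template code point back at the template source. A quick demonstration, assuming a default Environment (the exact frame text differs slightly across versions):

    import traceback
    from jinja2 import Environment

    tmpl = Environment().from_string("line one\n{{ x / 0 }}")
    try:
        tmpl.render(x=1)
    except ZeroDivisionError:
        # The rewritten frame reports the template, e.g.:
        #   File "<template>", line 2, in top-level template code
        traceback.print_exc()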
- :param tb: The original traceback to get the local variables and - code info from. - :param filename: The template filename. - :param lineno: The line number in the template source. - """ - if tb is not None: - # Replace the real locals with the context that would be - # available at that point in the template. - locals = get_template_locals(tb.tb_frame.f_locals) - locals.pop("__jinja_exception__", None) - else: - locals = {} - - globals = { - "__name__": filename, - "__file__": filename, - "__jinja_exception__": exc_value, - } - # Raise an exception at the correct line number. - code: CodeType = compile( - "\n" * (lineno - 1) + "raise __jinja_exception__", filename, "exec" - ) - - # Build a new code object that points to the template file and - # replaces the location with a block name. - location = "template" - - if tb is not None: - function = tb.tb_frame.f_code.co_name - - if function == "root": - location = "top-level template code" - elif function.startswith("block_"): - location = f"block {function[6:]!r}" - - if sys.version_info >= (3, 8): - code = code.replace(co_name=location) - else: - code = CodeType( - code.co_argcount, - code.co_kwonlyargcount, - code.co_nlocals, - code.co_stacksize, - code.co_flags, - code.co_code, - code.co_consts, - code.co_names, - code.co_varnames, - code.co_filename, - location, - code.co_firstlineno, - code.co_lnotab, - code.co_freevars, - code.co_cellvars, - ) - - # Execute the new code, which is guaranteed to raise, and return - # the new traceback without this frame. - try: - exec(code, globals, locals) - except BaseException: - return sys.exc_info()[2].tb_next # type: ignore - - -def get_template_locals(real_locals: t.Mapping[str, t.Any]) -> t.Dict[str, t.Any]: - """Based on the runtime locals, get the context that would be - available at that point in the template. - """ - # Start with the current template context. - ctx: "t.Optional[Context]" = real_locals.get("context") - - if ctx is not None: - data: t.Dict[str, t.Any] = ctx.get_all().copy() - else: - data = {} - - # Might be in a derived context that only sets local variables - # rather than pushing a context. Local variables follow the scheme - # l_depth_name. Find the highest-depth local that has a value for - # each name. - local_overrides: t.Dict[str, t.Tuple[int, t.Any]] = {} - - for name, value in real_locals.items(): - if not name.startswith("l_") or value is missing: - # Not a template variable, or no longer relevant. - continue - - try: - _, depth_str, name = name.split("_", 2) - depth = int(depth_str) - except ValueError: - continue - - cur_depth = local_overrides.get(name, (-1,))[0] - - if cur_depth < depth: - local_overrides[name] = (depth, value) - - # Modify the context with any derived context. 
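The l_<depth>_<name> scheme handled here can be exercised directly, though jinja2.debug is an internal module and this only illustrates the depth rule (the dictionary below is hand-built, not a real frame's locals):

    from jinja2.debug import get_template_locals

    fake_locals = {
        "l_0_item": "outer",
        "l_1_item": "inner",        # higher depth wins
        "not_a_template_var": 42,   # ignored: no l_ prefix
    }
    print(get_template_locals(fake_locals))  # {'item': 'inner'}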
- for name, (_, value) in local_overrides.items(): - if value is missing: - data.pop(name, None) - else: - data[name] = value - - return data diff --git a/myenv/Lib/site-packages/jinja2/defaults.py b/myenv/Lib/site-packages/jinja2/defaults.py deleted file mode 100644 index 638cad3..0000000 --- a/myenv/Lib/site-packages/jinja2/defaults.py +++ /dev/null @@ -1,48 +0,0 @@ -import typing as t - -from .filters import FILTERS as DEFAULT_FILTERS # noqa: F401 -from .tests import TESTS as DEFAULT_TESTS # noqa: F401 -from .utils import Cycler -from .utils import generate_lorem_ipsum -from .utils import Joiner -from .utils import Namespace - -if t.TYPE_CHECKING: - import typing_extensions as te - -# defaults for the parser / lexer -BLOCK_START_STRING = "{%" -BLOCK_END_STRING = "%}" -VARIABLE_START_STRING = "{{" -VARIABLE_END_STRING = "}}" -COMMENT_START_STRING = "{#" -COMMENT_END_STRING = "#}" -LINE_STATEMENT_PREFIX: t.Optional[str] = None -LINE_COMMENT_PREFIX: t.Optional[str] = None -TRIM_BLOCKS = False -LSTRIP_BLOCKS = False -NEWLINE_SEQUENCE: "te.Literal['\\n', '\\r\\n', '\\r']" = "\n" -KEEP_TRAILING_NEWLINE = False - -# default filters, tests and namespace - -DEFAULT_NAMESPACE = { - "range": range, - "dict": dict, - "lipsum": generate_lorem_ipsum, - "cycler": Cycler, - "joiner": Joiner, - "namespace": Namespace, -} - -# default policies -DEFAULT_POLICIES: t.Dict[str, t.Any] = { - "compiler.ascii_str": True, - "urlize.rel": "noopener", - "urlize.target": None, - "urlize.extra_schemes": None, - "truncate.leeway": 5, - "json.dumps_function": None, - "json.dumps_kwargs": {"sort_keys": True}, - "ext.i18n.trimmed": False, -} diff --git a/myenv/Lib/site-packages/jinja2/environment.py b/myenv/Lib/site-packages/jinja2/environment.py deleted file mode 100644 index 1d3be0b..0000000 --- a/myenv/Lib/site-packages/jinja2/environment.py +++ /dev/null @@ -1,1675 +0,0 @@ -"""Classes for managing templates and their runtime and compile time -options. -""" - -import os -import typing -import typing as t -import weakref -from collections import ChainMap -from functools import lru_cache -from functools import partial -from functools import reduce -from types import CodeType - -from markupsafe import Markup - -from . 
import nodes -from .compiler import CodeGenerator -from .compiler import generate -from .defaults import BLOCK_END_STRING -from .defaults import BLOCK_START_STRING -from .defaults import COMMENT_END_STRING -from .defaults import COMMENT_START_STRING -from .defaults import DEFAULT_FILTERS # type: ignore[attr-defined] -from .defaults import DEFAULT_NAMESPACE -from .defaults import DEFAULT_POLICIES -from .defaults import DEFAULT_TESTS # type: ignore[attr-defined] -from .defaults import KEEP_TRAILING_NEWLINE -from .defaults import LINE_COMMENT_PREFIX -from .defaults import LINE_STATEMENT_PREFIX -from .defaults import LSTRIP_BLOCKS -from .defaults import NEWLINE_SEQUENCE -from .defaults import TRIM_BLOCKS -from .defaults import VARIABLE_END_STRING -from .defaults import VARIABLE_START_STRING -from .exceptions import TemplateNotFound -from .exceptions import TemplateRuntimeError -from .exceptions import TemplatesNotFound -from .exceptions import TemplateSyntaxError -from .exceptions import UndefinedError -from .lexer import get_lexer -from .lexer import Lexer -from .lexer import TokenStream -from .nodes import EvalContext -from .parser import Parser -from .runtime import Context -from .runtime import new_context -from .runtime import Undefined -from .utils import _PassArg -from .utils import concat -from .utils import consume -from .utils import import_string -from .utils import internalcode -from .utils import LRUCache -from .utils import missing - -if t.TYPE_CHECKING: - import typing_extensions as te - - from .bccache import BytecodeCache - from .ext import Extension - from .loaders import BaseLoader - -_env_bound = t.TypeVar("_env_bound", bound="Environment") - - -# for direct template usage we have up to ten living environments -@lru_cache(maxsize=10) -def get_spontaneous_environment(cls: t.Type[_env_bound], *args: t.Any) -> _env_bound: - """Return a new spontaneous environment. A spontaneous environment - is used for templates created directly rather than through an - existing environment. - - :param cls: Environment class to create. - :param args: Positional arguments passed to environment. - """ - env = cls(*args) - env.shared = True - return env - - -def create_cache( - size: int, -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Return the cache class for the given size.""" - if size == 0: - return None - - if size < 0: - return {} - - return LRUCache(size) # type: ignore - - -def copy_cache( - cache: t.Optional[t.MutableMapping[t.Any, t.Any]], -) -> t.Optional[t.MutableMapping[t.Tuple["weakref.ref[t.Any]", str], "Template"]]: - """Create an empty copy of the given cache.""" - if cache is None: - return None - - if type(cache) is dict: # noqa E721 - return {} - - return LRUCache(cache.capacity) # type: ignore - - -def load_extensions( - environment: "Environment", - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]], -) -> t.Dict[str, "Extension"]: - """Load the extensions from the list and bind it to the environment. - Returns a dict of instantiated extensions. - """ - result = {} - - for extension in extensions: - if isinstance(extension, str): - extension = t.cast(t.Type["Extension"], import_string(extension)) - - result[extension.identifier] = extension(environment) - - return result - - -def _environment_config_check(environment: "Environment") -> "Environment": - """Perform a sanity check on the environment.""" - assert issubclass( - environment.undefined, Undefined - ), "'undefined' must be a subclass of 'jinja2.Undefined'." 
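The first assertion in _environment_config_check above is why the ``undefined`` argument must be an Undefined subclass; the stock StrictUndefined is the usual way to opt into stricter behavior:

    from jinja2 import Environment, StrictUndefined

    env = Environment(undefined=StrictUndefined)
    try:
        env.from_string("{{ missing }}").render()
    except Exception as exc:
        # UndefinedError: 'missing' is undefined
        print(type(exc).__name__, exc)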
- assert ( - environment.block_start_string - != environment.variable_start_string - != environment.comment_start_string - ), "block, variable and comment start strings must be different." - assert environment.newline_sequence in { - "\r", - "\r\n", - "\n", - }, "'newline_sequence' must be one of '\\n', '\\r\\n', or '\\r'." - return environment - - -class Environment: - r"""The core component of Jinja is the `Environment`. It contains - important shared variables like configuration, filters, tests, - globals and others. Instances of this class may be modified if - they are not shared and if no template was loaded so far. - Modifications on environments after the first template was loaded - will lead to surprising effects and undefined behavior. - - Here are the possible initialization parameters: - - `block_start_string` - The string marking the beginning of a block. Defaults to ``'{%'``. - - `block_end_string` - The string marking the end of a block. Defaults to ``'%}'``. - - `variable_start_string` - The string marking the beginning of a print statement. - Defaults to ``'{{'``. - - `variable_end_string` - The string marking the end of a print statement. Defaults to - ``'}}'``. - - `comment_start_string` - The string marking the beginning of a comment. Defaults to ``'{#'``. - - `comment_end_string` - The string marking the end of a comment. Defaults to ``'#}'``. - - `line_statement_prefix` - If given and a string, this will be used as prefix for line based - statements. See also :ref:`line-statements`. - - `line_comment_prefix` - If given and a string, this will be used as prefix for line based - comments. See also :ref:`line-statements`. - - .. versionadded:: 2.2 - - `trim_blocks` - If this is set to ``True`` the first newline after a block is - removed (block, not variable tag!). Defaults to `False`. - - `lstrip_blocks` - If this is set to ``True`` leading spaces and tabs are stripped - from the start of a line to a block. Defaults to `False`. - - `newline_sequence` - The sequence that starts a newline. Must be one of ``'\r'``, - ``'\n'`` or ``'\r\n'``. The default is ``'\n'`` which is a - useful default for Linux and OS X systems as well as web - applications. - - `keep_trailing_newline` - Preserve the trailing newline when rendering templates. - The default is ``False``, which causes a single newline, - if present, to be stripped from the end of the template. - - .. versionadded:: 2.7 - - `extensions` - List of Jinja extensions to use. This can either be import paths - as strings or extension classes. For more information have a - look at :ref:`the extensions documentation `. - - `optimized` - should the optimizer be enabled? Default is ``True``. - - `undefined` - :class:`Undefined` or a subclass of it that is used to represent - undefined values in the template. - - `finalize` - A callable that can be used to process the result of a variable - expression before it is output. For example one can convert - ``None`` implicitly into an empty string here. - - `autoescape` - If set to ``True`` the XML/HTML autoescaping feature is enabled by - default. For more details about autoescaping see - :class:`~markupsafe.Markup`. As of Jinja 2.4 this can also - be a callable that is passed the template name and has to - return ``True`` or ``False`` depending on autoescape should be - enabled by default. - - .. versionchanged:: 2.4 - `autoescape` can now be a function - - `loader` - The template loader for this environment. - - `cache_size` - The size of the cache. 
Per default this is ``400`` which means - that if more than 400 templates are loaded the loader will clean - out the least recently used template. If the cache size is set to - ``0`` templates are recompiled all the time, if the cache size is - ``-1`` the cache will not be cleaned. - - .. versionchanged:: 2.8 - The cache size was increased to 400 from a low 50. - - `auto_reload` - Some loaders load templates from locations where the template - sources may change (ie: file system or database). If - ``auto_reload`` is set to ``True`` (default) every time a template is - requested the loader checks if the source changed and if yes, it - will reload the template. For higher performance it's possible to - disable that. - - `bytecode_cache` - If set to a bytecode cache object, this object will provide a - cache for the internal Jinja bytecode so that templates don't - have to be parsed if they were not changed. - - See :ref:`bytecode-cache` for more information. - - `enable_async` - If set to true this enables async template execution which - allows using async functions and generators. - """ - - #: if this environment is sandboxed. Modifying this variable won't make - #: the environment sandboxed though. For a real sandboxed environment - #: have a look at jinja2.sandbox. This flag alone controls the code - #: generation by the compiler. - sandboxed = False - - #: True if the environment is just an overlay - overlayed = False - - #: the environment this environment is linked to if it is an overlay - linked_to: t.Optional["Environment"] = None - - #: shared environments have this set to `True`. A shared environment - #: must not be modified - shared = False - - #: the class that is used for code generation. See - #: :class:`~jinja2.compiler.CodeGenerator` for more information. - code_generator_class: t.Type["CodeGenerator"] = CodeGenerator - - concat = "".join - - #: the context class that is used for templates. See - #: :class:`~jinja2.runtime.Context` for more information. - context_class: t.Type[Context] = Context - - template_class: t.Type["Template"] - - def __init__( - self, - block_start_string: str = BLOCK_START_STRING, - block_end_string: str = BLOCK_END_STRING, - variable_start_string: str = VARIABLE_START_STRING, - variable_end_string: str = VARIABLE_END_STRING, - comment_start_string: str = COMMENT_START_STRING, - comment_end_string: str = COMMENT_END_STRING, - line_statement_prefix: t.Optional[str] = LINE_STATEMENT_PREFIX, - line_comment_prefix: t.Optional[str] = LINE_COMMENT_PREFIX, - trim_blocks: bool = TRIM_BLOCKS, - lstrip_blocks: bool = LSTRIP_BLOCKS, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = NEWLINE_SEQUENCE, - keep_trailing_newline: bool = KEEP_TRAILING_NEWLINE, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = (), - optimized: bool = True, - undefined: t.Type[Undefined] = Undefined, - finalize: t.Optional[t.Callable[..., t.Any]] = None, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = False, - loader: t.Optional["BaseLoader"] = None, - cache_size: int = 400, - auto_reload: bool = True, - bytecode_cache: t.Optional["BytecodeCache"] = None, - enable_async: bool = False, - ): - # !!Important notice!! - # The constructor accepts quite a few arguments that should be - # passed by keyword rather than position. 
However it's important to - # not change the order of arguments because it's used at least - # internally in those cases: - # - spontaneous environments (i18n extension and Template) - # - unittests - # If parameter changes are required only add parameters at the end - # and don't change the arguments (or the defaults!) of the arguments - # existing already. - - # lexer / parser information - self.block_start_string = block_start_string - self.block_end_string = block_end_string - self.variable_start_string = variable_start_string - self.variable_end_string = variable_end_string - self.comment_start_string = comment_start_string - self.comment_end_string = comment_end_string - self.line_statement_prefix = line_statement_prefix - self.line_comment_prefix = line_comment_prefix - self.trim_blocks = trim_blocks - self.lstrip_blocks = lstrip_blocks - self.newline_sequence = newline_sequence - self.keep_trailing_newline = keep_trailing_newline - - # runtime information - self.undefined: t.Type[Undefined] = undefined - self.optimized = optimized - self.finalize = finalize - self.autoescape = autoescape - - # defaults - self.filters = DEFAULT_FILTERS.copy() - self.tests = DEFAULT_TESTS.copy() - self.globals = DEFAULT_NAMESPACE.copy() - - # set the loader provided - self.loader = loader - self.cache = create_cache(cache_size) - self.bytecode_cache = bytecode_cache - self.auto_reload = auto_reload - - # configurable policies - self.policies = DEFAULT_POLICIES.copy() - - # load extensions - self.extensions = load_extensions(self, extensions) - - self.is_async = enable_async - _environment_config_check(self) - - def add_extension(self, extension: t.Union[str, t.Type["Extension"]]) -> None: - """Adds an extension after the environment was created. - - .. versionadded:: 2.5 - """ - self.extensions.update(load_extensions(self, [extension])) - - def extend(self, **attributes: t.Any) -> None: - """Add the items to the instance of the environment if they do not exist - yet. This is used by :ref:`extensions ` to register - callbacks and configuration values without breaking inheritance. - """ - for key, value in attributes.items(): - if not hasattr(self, key): - setattr(self, key, value) - - def overlay( - self, - block_start_string: str = missing, - block_end_string: str = missing, - variable_start_string: str = missing, - variable_end_string: str = missing, - comment_start_string: str = missing, - comment_end_string: str = missing, - line_statement_prefix: t.Optional[str] = missing, - line_comment_prefix: t.Optional[str] = missing, - trim_blocks: bool = missing, - lstrip_blocks: bool = missing, - newline_sequence: "te.Literal['\\n', '\\r\\n', '\\r']" = missing, - keep_trailing_newline: bool = missing, - extensions: t.Sequence[t.Union[str, t.Type["Extension"]]] = missing, - optimized: bool = missing, - undefined: t.Type[Undefined] = missing, - finalize: t.Optional[t.Callable[..., t.Any]] = missing, - autoescape: t.Union[bool, t.Callable[[t.Optional[str]], bool]] = missing, - loader: t.Optional["BaseLoader"] = missing, - cache_size: int = missing, - auto_reload: bool = missing, - bytecode_cache: t.Optional["BytecodeCache"] = missing, - enable_async: bool = False, - ) -> "Environment": - """Create a new overlay environment that shares all the data with the - current environment except for cache and the overridden attributes. - Extensions cannot be removed for an overlayed environment. 
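In practice overlay() is mostly used to re-render the same templates under different settings without rebuilding filters and globals. A minimal sketch, assuming a default Environment (``shout`` is an illustrative filter):

    from jinja2 import Environment

    base = Environment()
    base.filters["shout"] = lambda s: s.upper()

    # Shares filters/globals with `base`, overrides autoescape, and
    # records the link back to the original environment.
    html_env = base.overlay(autoescape=True)
    print(html_env.from_string("{{ '<b>' | shout }}").render())  # &lt;B&gt;
    print(html_env.linked_to is base)  # True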
An overlayed - environment automatically gets all the extensions of the environment it - is linked to plus optional extra extensions. - - Creating overlays should happen after the initial environment was set - up completely. Not all attributes are truly linked, some are just - copied over so modifications on the original environment may not shine - through. - - .. versionchanged:: 3.1.2 - Added the ``newline_sequence``,, ``keep_trailing_newline``, - and ``enable_async`` parameters to match ``__init__``. - """ - args = dict(locals()) - del args["self"], args["cache_size"], args["extensions"], args["enable_async"] - - rv = object.__new__(self.__class__) - rv.__dict__.update(self.__dict__) - rv.overlayed = True - rv.linked_to = self - - for key, value in args.items(): - if value is not missing: - setattr(rv, key, value) - - if cache_size is not missing: - rv.cache = create_cache(cache_size) - else: - rv.cache = copy_cache(self.cache) - - rv.extensions = {} - for key, value in self.extensions.items(): - rv.extensions[key] = value.bind(rv) - if extensions is not missing: - rv.extensions.update(load_extensions(rv, extensions)) - - if enable_async is not missing: - rv.is_async = enable_async - - return _environment_config_check(rv) - - @property - def lexer(self) -> Lexer: - """The lexer for this environment.""" - return get_lexer(self) - - def iter_extensions(self) -> t.Iterator["Extension"]: - """Iterates over the extensions by priority.""" - return iter(sorted(self.extensions.values(), key=lambda x: x.priority)) - - def getitem( - self, obj: t.Any, argument: t.Union[str, t.Any] - ) -> t.Union[t.Any, Undefined]: - """Get an item or attribute of an object but prefer the item.""" - try: - return obj[argument] - except (AttributeError, TypeError, LookupError): - if isinstance(argument, str): - try: - attr = str(argument) - except Exception: - pass - else: - try: - return getattr(obj, attr) - except AttributeError: - pass - return self.undefined(obj=obj, name=argument) - - def getattr(self, obj: t.Any, attribute: str) -> t.Any: - """Get an item or attribute of an object but prefer the attribute. - Unlike :meth:`getitem` the attribute *must* be a string. - """ - try: - return getattr(obj, attribute) - except AttributeError: - pass - try: - return obj[attribute] - except (TypeError, LookupError, AttributeError): - return self.undefined(obj=obj, name=attribute) - - def _filter_test_common( - self, - name: t.Union[str, Undefined], - value: t.Any, - args: t.Optional[t.Sequence[t.Any]], - kwargs: t.Optional[t.Mapping[str, t.Any]], - context: t.Optional[Context], - eval_ctx: t.Optional[EvalContext], - is_filter: bool, - ) -> t.Any: - if is_filter: - env_map = self.filters - type_name = "filter" - else: - env_map = self.tests - type_name = "test" - - func = env_map.get(name) # type: ignore - - if func is None: - msg = f"No {type_name} named {name!r}." - - if isinstance(name, Undefined): - try: - name._fail_with_undefined_error() - except Exception as e: - msg = f"{msg} ({e}; did you forget to quote the callable name?)" - - raise TemplateRuntimeError(msg) - - args = [value, *(args if args is not None else ())] - kwargs = kwargs if kwargs is not None else {} - pass_arg = _PassArg.from_obj(func) - - if pass_arg is _PassArg.context: - if context is None: - raise TemplateRuntimeError( - f"Attempted to invoke a context {type_name} without context." 
- ) - - args.insert(0, context) - elif pass_arg is _PassArg.eval_context: - if eval_ctx is None: - if context is not None: - eval_ctx = context.eval_ctx - else: - eval_ctx = EvalContext(self) - - args.insert(0, eval_ctx) - elif pass_arg is _PassArg.environment: - args.insert(0, self) - - return func(*args, **kwargs) - - def call_filter( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a filter on a value the same way the compiler does. - - This might return a coroutine if the filter is running from an - environment in async mode and the filter supports async - execution. It's your responsibility to await this if needed. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, True - ) - - def call_test( - self, - name: str, - value: t.Any, - args: t.Optional[t.Sequence[t.Any]] = None, - kwargs: t.Optional[t.Mapping[str, t.Any]] = None, - context: t.Optional[Context] = None, - eval_ctx: t.Optional[EvalContext] = None, - ) -> t.Any: - """Invoke a test on a value the same way the compiler does. - - This might return a coroutine if the test is running from an - environment in async mode and the test supports async execution. - It's your responsibility to await this if needed. - - .. versionchanged:: 3.0 - Tests support ``@pass_context``, etc. decorators. Added - the ``context`` and ``eval_ctx`` parameters. - - .. versionadded:: 2.7 - """ - return self._filter_test_common( - name, value, args, kwargs, context, eval_ctx, False - ) - - @internalcode - def parse( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> nodes.Template: - """Parse the sourcecode and return the abstract syntax tree. This - tree of nodes is used by the compiler to convert the template into - executable source- or bytecode. This is useful for debugging or to - extract information from templates. - - If you are :ref:`developing Jinja extensions ` - this gives you a good overview of the node tree generated. - """ - try: - return self._parse(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def _parse( - self, source: str, name: t.Optional[str], filename: t.Optional[str] - ) -> nodes.Template: - """Internal parsing function used by `parse` and `compile`.""" - return Parser(self, source, name, filename).parse() - - def lex( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> t.Iterator[t.Tuple[int, str, str]]: - """Lex the given sourcecode and return a generator that yields - tokens as tuples in the form ``(lineno, token_type, value)``. - This can be useful for :ref:`extension development ` - and debugging templates. - - This does not perform preprocessing. If you want the preprocessing - of the extensions to be applied you have to filter source through - the :meth:`preprocess` method. - """ - source = str(source) - try: - return self.lexer.tokeniter(source, name, filename) - except TemplateSyntaxError: - self.handle_exception(source=source) - - def preprocess( - self, - source: str, - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - ) -> str: - """Preprocesses the source with all extensions. 
This is automatically - called for all parsing and compiling methods but *not* for :meth:`lex` - because there you usually only want the actual source tokenized. - """ - return reduce( - lambda s, e: e.preprocess(s, name, filename), - self.iter_extensions(), - str(source), - ) - - def _tokenize( - self, - source: str, - name: t.Optional[str], - filename: t.Optional[str] = None, - state: t.Optional[str] = None, - ) -> TokenStream: - """Called by the parser to do the preprocessing and filtering - for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`. - """ - source = self.preprocess(source, name, filename) - stream = self.lexer.tokenize(source, name, filename, state) - - for ext in self.iter_extensions(): - stream = ext.filter_stream(stream) # type: ignore - - if not isinstance(stream, TokenStream): - stream = TokenStream(stream, name, filename) - - return stream - - def _generate( - self, - source: nodes.Template, - name: t.Optional[str], - filename: t.Optional[str], - defer_init: bool = False, - ) -> str: - """Internal hook that can be overridden to hook a different generate - method in. - - .. versionadded:: 2.5 - """ - return generate( # type: ignore - source, - self, - name, - filename, - defer_init=defer_init, - optimized=self.optimized, - ) - - def _compile(self, source: str, filename: str) -> CodeType: - """Internal hook that can be overridden to hook a different compile - method in. - - .. versionadded:: 2.5 - """ - return compile(source, filename, "exec") - - @typing.overload - def compile( # type: ignore - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[False]" = False, - defer_init: bool = False, - ) -> CodeType: ... - - @typing.overload - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: "te.Literal[True]" = ..., - defer_init: bool = False, - ) -> str: ... - - @internalcode - def compile( - self, - source: t.Union[str, nodes.Template], - name: t.Optional[str] = None, - filename: t.Optional[str] = None, - raw: bool = False, - defer_init: bool = False, - ) -> t.Union[str, CodeType]: - """Compile a node or template source code. The `name` parameter is - the load name of the template after it was joined using - :meth:`join_path` if necessary, not the filename on the file system. - the `filename` parameter is the estimated filename of the template on - the file system. If the template came from a database or memory this - can be omitted. - - The return value of this method is a python code object. If the `raw` - parameter is `True` the return value will be a string with python - code equivalent to the bytecode returned otherwise. This method is - mainly used internally. - - `defer_init` is use internally to aid the module code generator. This - causes the generated code to be able to import without the global - environment variable to be set. - - .. versionadded:: 2.4 - `defer_init` parameter added. - """ - source_hint = None - try: - if isinstance(source, str): - source_hint = source - source = self._parse(source, name, filename) - source = self._generate(source, name, filename, defer_init=defer_init) - if raw: - return source - if filename is None: - filename = "